diff --git a/latent.ipynb b/latent.ipynb
index c20e337..f2aa4b4 100644
--- a/latent.ipynb
+++ b/latent.ipynb
@@ -1,15 +1,5 @@
{
"cells": [
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "view-in-github"
- },
- "source": [
- "
"
- ]
- },
{
"cell_type": "markdown",
"metadata": {
@@ -53,28 +43,39 @@
"#@markdown Enable saving models to Google Drive to avoid downloading the model every Colab instance\n",
"save_models_to_google_drive = True #@param {type:\"boolean\"}\n",
"\n",
- "if save_outputs_to_google_drive or save_models_to_google_drive:\n",
- " from google.colab import drive\n",
- " try:\n",
- " drive.mount('/content/gdrive')\n",
- " except:\n",
- " save_outputs_to_google_drive = False\n",
- " save_models_to_google_drive = False\n",
- "\n",
- "model_path = \"/content/gdrive/MyDrive/AI/models\" if save_models_to_google_drive else \"/content/\"\n",
- "outputs_path = \"/content/gdrive/MyDrive/AI/latent_majesty_diffusion\" if save_outputs_to_google_drive else \"/content/outputs\"\n",
- "!mkdir -p $model_path\n",
- "!mkdir -p $outputs_path\n",
- "print(f\"Model will be stored at {model_path}\")\n",
- "print(f\"Outputs will be saved to {outputs_path}\")\n",
+ "try:\n",
+ " import google.colab\n",
+ "except ModuleNotFoundError:\n",
+ " is_local = True\n",
+ "else:\n",
+ " is_local = False\n",
+ "\n",
+ "if not is_local:\n",
+ " if save_outputs_to_google_drive or save_models_to_google_drive:\n",
+ " from google.colab import drive\n",
+ " try:\n",
+ " drive.mount('/content/gdrive')\n",
+ " except:\n",
+ " save_outputs_to_google_drive = False\n",
+ " save_models_to_google_drive = False\n",
+ "\n",
+ " model_path = \"/content/gdrive/MyDrive/AI/models\" if save_models_to_google_drive else \"/content/\"\n",
+ " outputs_path = \"/content/gdrive/MyDrive/AI/latent_majesty_diffusion\" if save_outputs_to_google_drive else \"/content/outputs\"\n",
+ " !mkdir -p $model_path\n",
+ " !mkdir -p $outputs_path\n",
+ " print(f\"Model will be stored at {model_path}\")\n",
+ " print(f\"Outputs will be saved to {outputs_path}\")\n",
"\n",
"#If you want to run it locally change it to true\n",
- "is_local = False\n",
"skip_installs = False\n",
"if(is_local):\n",
" model_path = \"/choose/your/local/model/path\"\n",
" outputs_path = \"/choose/your/local/outputs/path\"\n",
- " skip_installs = True"
+    "  # TODO: Make an install script so a local run\n",
+    "  # doesn't need to rely on the notebook for installation\n",
+ " # skip_installs = True\n",
+ " save_outputs_to_google_drive = False\n",
+ " save_models_to_google_drive = False"
]
},
{
@@ -119,11 +120,21 @@
" downgrade_pytorch_result = subprocess.run(['pip', 'install', 'torch==1.10.2', 'torchvision==0.11.3', '-q'], stdout=subprocess.PIPE).stdout.decode('utf-8')\n",
" import sys\n",
" sys.path.append(\".\")\n",
+ " # TODO: If latent-diffusion is already pip installed, check that it is the correct version\n",
" !git clone https://github.com/multimodalart/latent-diffusion\n",
" !git clone https://github.com/CompVis/taming-transformers\n",
" !git clone https://github.com/TencentARC/GFPGAN\n",
" !git clone https://github.com/multimodalart/majesty-diffusion\n",
- " !git lfs clone https://github.com/LAION-AI/aesthetic-predictor\n",
+ " if is_local:\n",
+ " # \"WARNING: 'git lfs clone' is deprecated and will not be updated\n",
+ " # with new flags from 'git clone'\n",
+ " # 'git clone' has been updated in upstream Git to have comparable\n",
+ " # speeds to 'git lfs clone'.\"\n",
+ "\n",
+    "  # Local systems will not necessarily have git-lfs, so use a plain git clone:\n",
+ " !git clone https://github.com/LAION-AI/aesthetic-predictor\n",
+ " else:\n",
+ " !git lfs clone https://github.com/LAION-AI/aesthetic-predictor\n",
" !pip install -e ./taming-transformers\n",
" !pip install omegaconf>=2.0.0 pytorch-lightning>=1.0.8 torch-fidelity einops\n",
" !pip install transformers\n",
@@ -139,6 +150,7 @@
" from taming.models import vqgan\n",
" from subprocess import Popen, PIPE\n",
" try:\n",
+ " # TODO: Check that installed mmc is gradient_checkpointing branch\n",
" import mmc\n",
" except:\n",
" # install mmc\n",
@@ -1216,13 +1228,6 @@
"experimental_aesthetic_embeddings_score = 9"
]
},
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "ZUu_pyTkuxiT"
- },
- "source": []
- },
{
"cell_type": "markdown",
"metadata": {
@@ -1314,7 +1319,7 @@
" custom_settings = f'majesty-diffusion/latent_settings_library/{settings_library}.cfg'\n",
"\n",
"global_var_scope = globals()\n",
- "if(custom_settings is not None and custom_settings is not '' and custom_settings != 'path/to/settings.cfg'):\n",
+ "if(custom_settings is not None and custom_settings != '' and custom_settings != 'path/to/settings.cfg'):\n",
" print('Loaded ', custom_settings)\n",
" try:\n",
" from configparser import ConfigParser\n",
@@ -1345,7 +1350,7 @@
" for advanced_setting in advanced_settings:\n",
" global_var_scope[advanced_setting[0]] = eval(advanced_setting[1])\n",
"\n",
- "if(((init_image is not None) or (init_image != 'None') or (init_image != '')) and starting_timestep is not 1 and custom_schedule_setting[0][1] == 1000):\n",
+ "if(((init_image is not None) or (init_image != 'None') or (init_image != '')) and starting_timestep != 1 and custom_schedule_setting[0][1] == 1000):\n",
" custom_schedule_setting[0] = [custom_schedule_setting[0][0], custom_schedule_setting[0][1]*starting_timestep, custom_schedule_setting[0][2]]\n",
"\n",
"prompts = clip_prompts\n",
@@ -1445,13 +1450,23 @@
"provenance": []
},
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
"name": "python3"
},
"language_info": {
- "name": "python"
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.12"
}
},
"nbformat": 4,
"nbformat_minor": 0
-}
+}
\ No newline at end of file
diff --git a/v.ipynb b/v.ipynb
index ecb8c21..14eca98 100644
--- a/v.ipynb
+++ b/v.ipynb
@@ -1,15 +1,5 @@
{
"cells": [
- {
- "cell_type": "markdown",
- "metadata": {
- "colab_type": "text",
- "id": "view-in-github"
- },
- "source": [
- "
"
- ]
- },
{
"cell_type": "markdown",
"metadata": {
@@ -51,7 +41,7 @@
"execution_count": null,
"metadata": {
"cellView": "form",
- "id": "AHukAUBzBxZf"
+ "id": "YjHdHnfVJewN"
},
"outputs": [],
"source": [
@@ -60,28 +50,39 @@
"#@markdown Enable saving models to Google Drive to avoid downloading the model every Colab instance\n",
"save_models_to_google_drive = True #@param {type:\"boolean\"}\n",
"\n",
- "if save_outputs_to_google_drive or save_models_to_google_drive:\n",
- " from google.colab import drive\n",
- " try:\n",
- " drive.mount('/content/gdrive')\n",
- " except:\n",
- " save_outputs_to_google_drive = False\n",
- " save_models_to_google_drive = False\n",
- "\n",
- "model_path = \"/content/gdrive/MyDrive/AI/models\" if save_models_to_google_drive else \"/content/\"\n",
- "outputs_path = \"/content/gdrive/MyDrive/AI/v-majesty-diffusion\" if save_outputs_to_google_drive else \"/content/outputs\"\n",
- "!mkdir -p $model_path\n",
- "!mkdir -p $outputs_path\n",
- "print(f\"Model will be stored at {model_path}\")\n",
- "print(f\"Outputs will be saved to {outputs_path}\")\n",
+ "try:\n",
+ " import google.colab\n",
+ "except ModuleNotFoundError:\n",
+ " is_local = True\n",
+ "else:\n",
+ " is_local = False\n",
+ "\n",
+ "if not is_local:\n",
+ " if save_outputs_to_google_drive or save_models_to_google_drive:\n",
+ " from google.colab import drive\n",
+ " try:\n",
+ " drive.mount('/content/gdrive')\n",
+ " except:\n",
+ " save_outputs_to_google_drive = False\n",
+ " save_models_to_google_drive = False\n",
+ "\n",
+ " model_path = \"/content/gdrive/MyDrive/AI/models\" if save_models_to_google_drive else \"/content/\"\n",
+    "  outputs_path = \"/content/gdrive/MyDrive/AI/v-majesty-diffusion\" if save_outputs_to_google_drive else \"/content/outputs\"\n",
+ " !mkdir -p $model_path\n",
+ " !mkdir -p $outputs_path\n",
+ " print(f\"Model will be stored at {model_path}\")\n",
+ " print(f\"Outputs will be saved to {outputs_path}\")\n",
"\n",
"#If you want to run it locally change it to true\n",
- "is_local = False\n",
"skip_installs = False\n",
"if(is_local):\n",
" model_path = \"/choose/your/local/model/path\"\n",
" outputs_path = \"/choose/your/local/outputs/path\"\n",
- " skip_installs = True"
+    "  # TODO: Make an install script so a local run\n",
+    "  # doesn't need to rely on the notebook for installation\n",
+ " # skip_installs = True\n",
+ " save_outputs_to_google_drive = False\n",
+ " save_models_to_google_drive = False"
]
},
{
@@ -115,7 +116,16 @@
" !git clone https://github.com/crowsonkb/v-diffusion-pytorch\n",
" !git clone https://github.com/crowsonkb/guided-diffusion\n",
" !git clone https://github.com/multimodalart/majesty-diffusion\n",
- " !git lfs clone https://github.com/LAION-AI/aesthetic-predictor\n",
+ " if is_local:\n",
+ " # \"WARNING: 'git lfs clone' is deprecated and will not be updated\n",
+ " # with new flags from 'git clone'\n",
+ " # 'git clone' has been updated in upstream Git to have comparable\n",
+ " # speeds to 'git lfs clone'.\"\n",
+ "\n",
+    "  # Local systems will not necessarily have git-lfs, so use a plain git clone:\n",
+ " !git clone https://github.com/LAION-AI/aesthetic-predictor\n",
+ " else:\n",
+ " !git lfs clone https://github.com/LAION-AI/aesthetic-predictor\n",
" sys.path.append('./guided-diffusion')\n",
" !pip install omegaconf>=2.0.0 pytorch-lightning>=1.0.8 torch-fidelity einops\n",
" !pip install resize-right\n",
@@ -125,6 +135,7 @@
" !pip install fairscale\n",
" from subprocess import Popen, PIPE\n",
" try:\n",
+ " # TODO: Check that installed mmc is gradient_checkpointing branch\n",
" import mmc\n",
" except:\n",
" # install mmc\n",
@@ -608,8 +619,7 @@
"execution_count": null,
"metadata": {
"cellView": "form",
- "id": "Fpbody2NCR7w",
- "scrolled": false
+ "id": "Fpbody2NCR7w"
},
"outputs": [],
"source": [
@@ -774,8 +784,7 @@
"execution_count": null,
"metadata": {
"cellView": "form",
- "id": "VnQjGugaDZPJ",
- "scrolled": false
+ "id": "VnQjGugaDZPJ"
},
"outputs": [],
"source": [
@@ -1588,8 +1597,7 @@
"execution_count": null,
"metadata": {
"cellView": "form",
- "id": "X5gODNAMEUCR",
- "scrolled": false
+ "id": "X5gODNAMEUCR"
},
"outputs": [],
"source": [
@@ -1664,7 +1672,7 @@
" else:\n",
" custom_settings = f'majesty-diffusion/v_settings_library/{settings_library}.cfg'\n",
"\n",
- "is_custom_settings = (custom_settings is not None and custom_settings is not '' and custom_settings != 'path/to/settings.cfg')\n",
+ "is_custom_settings = (custom_settings is not None and custom_settings != '' and custom_settings != 'path/to/settings.cfg')\n",
"\n",
"#Reload the user selected models after an upscale or after they remove a settings file\n",
"if(has_upscaled or (has_loaded_custom and not is_custom_settings)):\n",
@@ -1833,7 +1841,7 @@
"hash": "9e3236fd3f990fb8325876dc599ef7209db0a0fa116cc6a98c96d08faccfbfa5"
},
"kernelspec": {
- "display_name": "Python 3.9.12 ('multimodal')",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -1857,4 +1865,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
+}
\ No newline at end of file