diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.ipynb b/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.ipynb
index 62de8796..1cadad2c 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.ipynb
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.ipynb
@@ -818,7 +818,7 @@
     "use_pretrained = False\n",
     "\n",
     "if use_pretrained:\n",
-    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models\", model=\"ddpm_2d\", verbose=True).to(device)\n",
+    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models:v0.2\", model=\"ddpm_2d\", verbose=True).to(device)\n",
     "else:\n",
     "    n_epochs = 100\n",
     "    val_interval = 10\n",
@@ -1096,7 +1096,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.6"
+   "version": "3.8.13"
   }
  },
  "nbformat": 4,
diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.py b/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.py
index a6dfe00b..dee1bed2 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.py
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_compare_schedulers.py
@@ -6,7 +6,7 @@
 #       extension: .py
 #       format_name: percent
 #       format_version: '1.3'
-#       jupytext_version: 1.14.4
+#       jupytext_version: 1.14.1
 #   kernelspec:
 #     display_name: Python 3 (ipykernel)
 #     language: python
@@ -207,7 +207,7 @@
 use_pretrained = False
 
 if use_pretrained:
-    model = torch.hub.load("marksgraham/pretrained_generative_models", model="ddpm_2d", verbose=True).to(device)
+    model = torch.hub.load("marksgraham/pretrained_generative_models:v0.2", model="ddpm_2d", verbose=True).to(device)
 else:
     n_epochs = 100
     val_interval = 10
diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.ipynb b/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.ipynb
index 21d5e83f..1fc3f9f0 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.ipynb
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.ipynb
@@ -636,7 +636,7 @@
     "use_pretrained = False\n",
     "\n",
     "if use_pretrained:\n",
-    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models\", model=\"ddpm_2d\", verbose=True).to(device)\n",
+    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models:v0.2\", model=\"ddpm_2d\", verbose=True).to(device)\n",
     "else:\n",
     "    n_epochs = 50\n",
     "    val_interval = 5\n",
@@ -914,7 +914,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.6"
+   "version": "3.8.13"
   }
  },
  "nbformat": 4,
diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.py b/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.py
index ea6a1f8f..58874039 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.py
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_inpainting.py
@@ -6,7 +6,7 @@
 #       extension: .py
 #       format_name: percent
 #       format_version: '1.3'
-#       jupytext_version: 1.14.4
+#       jupytext_version: 1.14.1
 #   kernelspec:
 #     display_name: Python 3 (ipykernel)
 #     language: python
@@ -191,7 +191,7 @@
 use_pretrained = False
 
 if use_pretrained:
-    model = torch.hub.load("marksgraham/pretrained_generative_models", model="ddpm_2d", verbose=True).to(device)
+    model = torch.hub.load("marksgraham/pretrained_generative_models:v0.2", model="ddpm_2d", verbose=True).to(device)
 else:
     n_epochs = 50
     val_interval = 5
diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.ipynb b/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.ipynb
index 673170a8..40595441 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.ipynb
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.ipynb
@@ -42,6 +42,7 @@
    "execution_count": 2,
    "id": "dd62a552",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -137,6 +138,7 @@
    "execution_count": 3,
    "id": "8fc58c80",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -169,6 +171,7 @@
    "execution_count": 4,
    "id": "ad5a1948",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -194,6 +197,7 @@
    "execution_count": 5,
    "id": "65e1c200",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -232,6 +236,7 @@
    "execution_count": 6,
    "id": "e2f9bebd",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -271,6 +276,7 @@
    "execution_count": 7,
    "id": "938318c2",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -320,6 +326,7 @@
    "execution_count": 8,
    "id": "b698f4f8",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -372,6 +379,7 @@
    "execution_count": 9,
    "id": "2c52e4f4",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     },
@@ -415,6 +423,7 @@
    "execution_count": 10,
    "id": "0f697a13",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     },
@@ -763,7 +772,7 @@
     "use_pretrained = False\n",
     "\n",
     "if use_pretrained:\n",
-    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models\", model=\"ddpm_2d\", verbose=True).to(device)\n",
+    "    model = torch.hub.load(\"marksgraham/pretrained_generative_models:v0.2\", model=\"ddpm_2d\", verbose=True).to(device)\n",
     "else:\n",
     "    n_epochs = 75\n",
     "    val_interval = 5\n",
@@ -852,6 +861,7 @@
    "execution_count": 11,
    "id": "2cdcda81",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -901,6 +911,7 @@
    "execution_count": 12,
    "id": "1427e5d4",
    "metadata": {
+    "collapsed": false,
     "jupyter": {
      "outputs_hidden": false
     }
@@ -984,7 +995,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.6"
+   "version": "3.8.13"
   }
  },
  "nbformat": 4,
diff --git a/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.py b/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.py
index 0384d33e..2d81ddb6 100644
--- a/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.py
+++ b/tutorials/generative/2d_ddpm/2d_ddpm_tutorial.py
@@ -6,7 +6,7 @@
 #       extension: .py
 #       format_name: percent
 #       format_version: '1.3'
-#       jupytext_version: 1.14.4
+#       jupytext_version: 1.14.1
 #   kernelspec:
 #     display_name: Python 3 (ipykernel)
 #     language: python
@@ -190,7 +190,7 @@
 use_pretrained = False
 
 if use_pretrained:
-    model = torch.hub.load("marksgraham/pretrained_generative_models", model="ddpm_2d", verbose=True).to(device)
+    model = torch.hub.load("marksgraham/pretrained_generative_models:v0.2", model="ddpm_2d", verbose=True).to(device)
 else:
     n_epochs = 75
     val_interval = 5
diff --git a/tutorials/generative/distributed_training/ddpm_training_ddp.py b/tutorials/generative/distributed_training/ddpm_training_ddp.py
index f111d52a..07fab1b0 100644
--- a/tutorials/generative/distributed_training/ddpm_training_ddp.py
+++ b/tutorials/generative/distributed_training/ddpm_training_ddp.py
@@ -197,7 +197,7 @@ def main_worker(args):
     inferer = DiffusionInferer(scheduler)
 
     # wrap the model with DistributedDataParallel module
-    model = DistributedDataParallel(model, device_ids=[device])
+    model = DistributedDataParallel(model, device_ids=[device], find_unused_parameters=True)
 
     # start a typical PyTorch training
     best_metric = 10000
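For context, a minimal standalone sketch of the two call patterns this patch settles on: loading the pretrained DDPM from a Torch Hub repository pinned to the v0.2 tag, and wrapping a model with DistributedDataParallel using find_unused_parameters=True. Only the hub repo, entry-point name, and DDP arguments come from the diff; the device choice and the single-process group setup below are illustrative assumptions, not part of the tutorials.

# Illustrative sketch only; assumes one CUDA device and a single-process group.
import os

import torch
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel

device = torch.device("cuda:0")

# Pinning the hub repo to the ":v0.2" tag fixes both the hubconf entry point and
# the downloaded checkpoint, instead of tracking the repository's default branch.
model = torch.hub.load(
    "marksgraham/pretrained_generative_models:v0.2", model="ddpm_2d", verbose=True
).to(device)

# DDP needs an initialized process group; these single-process values stand in
# for whatever a real launcher (e.g. torchrun) would normally provide.
os.environ.setdefault("MASTER_ADDR", "localhost")
os.environ.setdefault("MASTER_PORT", "29500")
dist.init_process_group(backend="nccl", rank=0, world_size=1)

# find_unused_parameters=True lets DDP's gradient reducer tolerate parameters that
# receive no gradient in a given forward pass, at the cost of an extra traversal.
model = DistributedDataParallel(model, device_ids=[device], find_unused_parameters=True)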