Automate tutorial html, py file creation (#496)

* workflow to export tutorials (a minimal sketch of such an export step is given below)

---------
This commit is contained in:
Dario Coscia
2025-03-15 11:01:19 +01:00
committed by Nicola Demo
parent aea24d0bee
commit 0146155c9b
51 changed files with 140529 additions and 440 deletions
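The diff below shows the black-style reformatting applied to the tutorial notebooks; the export workflow itself is not reproduced in this excerpt. As a rough sketch of what such an export step might look like (an assumption, not the actual workflow added by this commit: the tutorials/ directory layout and the use of nbconvert are illustrative), each notebook could be converted to html and py like this:

    # Sketch of an export step: convert every tutorial notebook to .html and .py.
    # The "tutorials" directory and the choice of nbconvert are assumptions,
    # not taken from this commit.
    from pathlib import Path

    from nbconvert import HTMLExporter, ScriptExporter

    for nb_path in sorted(Path("tutorials").rglob("*.ipynb")):
        for exporter, suffix in ((HTMLExporter(), ".html"), (ScriptExporter(), ".py")):
            body, _ = exporter.from_filename(str(nb_path))  # returns (output, resources)
            out_path = nb_path.with_suffix(suffix)
            out_path.write_text(body, encoding="utf-8")
            print(f"exported {nb_path} -> {out_path}")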


@@ -24,12 +24,13 @@
"outputs": [],
"source": [
"try:\n",
" import google.colab\n",
" IN_COLAB = True\n",
" import google.colab\n",
"\n",
" IN_COLAB = True\n",
"except:\n",
" IN_COLAB = False\n",
" IN_COLAB = False\n",
"if IN_COLAB:\n",
" !pip install \"pina-mathlab\"\n",
" !pip install \"pina-mathlab\"\n",
"\n",
"import torch\n",
"import warnings\n",
@@ -42,7 +43,7 @@
"from pina.domain import CartesianDomain\n",
"from pina.equation import Equation, FixedValue\n",
"\n",
"warnings.filterwarnings('ignore')"
"warnings.filterwarnings(\"ignore\")"
]
},
{
@@ -70,6 +71,7 @@
" # calculate the residual and return it\n",
" return u_x - u\n",
"\n",
"\n",
"class SimpleODE(SpatialProblem):\n",
"\n",
" output_variables = [\"u\"]\n",
@@ -101,7 +103,7 @@
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables)\n",
" input_dimensions=len(problem.input_variables),\n",
")\n",
"\n",
"# create the PINN object\n",
@@ -437,20 +439,22 @@
],
"source": [
"model = FeedForward(\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables)\n",
" )\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables),\n",
")\n",
"pinn = PINN(problem, model)\n",
"trainer = Trainer(solver=pinn,\n",
" accelerator='cpu',\n",
" logger=True,\n",
" callbacks=[NaiveMetricTracker()], # adding a callbacks\n",
" enable_model_summary=False,\n",
" train_size=1.0,\n",
" val_size=0.0,\n",
" test_size=0.0)\n",
"trainer = Trainer(\n",
" solver=pinn,\n",
" accelerator=\"cpu\",\n",
" logger=True,\n",
" callbacks=[NaiveMetricTracker()], # adding a callbacks\n",
" enable_model_summary=False,\n",
" train_size=1.0,\n",
" val_size=0.0,\n",
" test_size=0.0,\n",
")\n",
"trainer.train()"
]
},
@@ -486,7 +490,7 @@
}
],
"source": [
"trainer.callbacks[0].saved_metrics[:3] # only the first three epochs"
"trainer.callbacks[0].saved_metrics[:3] # only the first three epochs"
]
},
{
@@ -615,19 +619,21 @@
"seed_everything(42, workers=True)\n",
"\n",
"model = FeedForward(\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables)\n",
" )\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables),\n",
")\n",
"\n",
"pinn = PINN(problem, model)\n",
"trainer = Trainer(solver=pinn,\n",
" accelerator='cpu',\n",
" deterministic=True, # setting deterministic=True ensure reproducibility when a seed is imposed\n",
" max_epochs = 2000,\n",
" enable_model_summary=False,\n",
" callbacks=[Timer()]) # adding a callbacks\n",
"trainer = Trainer(\n",
" solver=pinn,\n",
" accelerator=\"cpu\",\n",
" deterministic=True, # setting deterministic=True ensure reproducibility when a seed is imposed\n",
" max_epochs=2000,\n",
" enable_model_summary=False,\n",
" callbacks=[Timer()],\n",
") # adding a callbacks\n",
"trainer.train()\n",
"print(f'Total training time {trainer.callbacks[0].time_elapsed(\"train\"):.5f} s')"
]
@@ -698,19 +704,20 @@
"seed_everything(42, workers=True)\n",
"\n",
"model = FeedForward(\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables)\n",
" )\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables),\n",
")\n",
"pinn = PINN(problem, model)\n",
"trainer = Trainer(solver=pinn,\n",
" accelerator='cpu',\n",
" deterministic=True,\n",
" max_epochs = 2000,\n",
" enable_model_summary=False,\n",
" callbacks=[Timer(),\n",
" StochasticWeightAveraging(swa_lrs=0.005)]) # adding StochasticWeightAveraging callbacks\n",
"trainer = Trainer(\n",
" solver=pinn,\n",
" accelerator=\"cpu\",\n",
" deterministic=True,\n",
" max_epochs=2000,\n",
" enable_model_summary=False,\n",
" callbacks=[Timer(), StochasticWeightAveraging(swa_lrs=0.005)],\n",
") # adding StochasticWeightAveraging callbacks\n",
"trainer.train()\n",
"print(f'Total training time {trainer.callbacks[0].time_elapsed(\"train\"):.5f} s')"
]
@@ -783,19 +790,20 @@
"seed_everything(42, workers=True)\n",
"\n",
"model = FeedForward(\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables)\n",
" )\n",
" layers=[10, 10],\n",
" func=torch.nn.Tanh,\n",
" output_dimensions=len(problem.output_variables),\n",
" input_dimensions=len(problem.input_variables),\n",
")\n",
"pinn = PINN(problem, model)\n",
"trainer = Trainer(solver=pinn,\n",
" accelerator='cpu',\n",
" max_epochs = 2000,\n",
" enable_model_summary=False,\n",
" gradient_clip_val=0.1, # clipping the gradient\n",
" callbacks=[Timer(),\n",
" StochasticWeightAveraging(swa_lrs=0.005)])\n",
"trainer = Trainer(\n",
" solver=pinn,\n",
" accelerator=\"cpu\",\n",
" max_epochs=2000,\n",
" enable_model_summary=False,\n",
" gradient_clip_val=0.1, # clipping the gradient\n",
" callbacks=[Timer(), StochasticWeightAveraging(swa_lrs=0.005)],\n",
")\n",
"trainer.train()\n",
"print(f'Total training time {trainer.callbacks[0].time_elapsed(\"train\"):.5f} s')"
]