a minimum refactor of optimizer and design region #2299

Draft
wants to merge 1 commit into base: develop
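A rough, non-authoritative usage sketch of the refactored API follows. Only `optimization_spec`, `design_regions`, and the plain `Optimizer` class come from this diff; the `TopologyDesignRegion` arguments, the placeholder simulation and objective function, and the `run()` call are assumptions based on the existing `invdes` plugin, not confirmed by this PR.

```python
# Sketch only: `base_simulation` and `objective_fn` are undefined placeholders,
# and the region/design arguments are assumed, not taken from this PR.
import tidy3d.plugins.invdes as tdi
from tidy3d.plugins.invdes.optimization_spec import AdamOptimizationSpec
from tidy3d.plugins.invdes.optimizer import Optimizer

# each design region now carries its own optimization settings
region = tdi.TopologyDesignRegion(
    size=(2.0, 2.0, 0.5),
    center=(0.0, 0.0, 0.0),
    eps_bounds=(1.0, 4.0),
    pixel_size=0.02,
    optimization_spec=AdamOptimizationSpec(learning_rate=0.1, num_steps=50),
)

# the design now takes a list of regions instead of a single `design_region`
design = tdi.InverseDesign(
    simulation=base_simulation,  # placeholder: a pre-built td.Simulation
    design_regions=[region],
    task_name="invdes_refactor_demo",
)

# the optimizer itself no longer holds the Adam hyperparameters
optimizer = Optimizer(design=design, results_cache_fname="history.pkl")
result = optimizer.run(post_process_fn=objective_fn)  # placeholder objective function
```

The apparent intent is that per-region optimization settings replace the global Adam settings that previously lived on `AdamOptimizer`.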
2 changes: 1 addition & 1 deletion docs/faq
Submodule faq updated 31 files
+9 −9 _faqs/Can-you-convert-a-lumerical-script-file-to-Tidy3D.md
+64 −64 _faqs/How-are-results-normalized.md
+13 −13 _faqs/How-do-I-include-material-dispersion.md
+9 −9 _faqs/How-do-I-run-a-simulation-and-access-the-results.md
+7 −7 _faqs/How-is-using-Tidy3D-billed.md
+11 −11 _faqs/Should-I-make-sure-that-fields-have-fully-decayed-by-the-end-of-the-simulation.md
+39 −39 _faqs/What-are-the-units-used-in-the-simulation.md
+20 −20 _faqs/What-do-I-need-to-know-about-the-numerical-grid.md
+14 −14 _faqs/Why-can-I-not-change-Tidy3D-instances-after-they-are-created.md
+8 −8 _faqs/Why-did-my-simulation-finish-early.md
+6 −6 _faqs/Why-does-the-kernel-crash-sometimes-when-using-the-web-based-python-notebook.md
+19 −19 _faqs/Why-is-a-simulation-diverging.md
+1 −1 docs/about-tidy3d.rst
+0 −10 docs/faq/can-you-convert-a-lumerical-script-file-to-tidy3d.md
+0 −45 docs/faq/how-are-results-normalized.md
+0 −14 docs/faq/how-do-i-include-material-dispersion.md
+0 −10 docs/faq/how-do-i-run-a-simulation-and-access-the-results.md
+0 −8 docs/faq/how-is-using-tidy3d-billed.md
+0 −12 docs/faq/should-i-make-sure-that-fields-have-fully-decayed-by-the-end-of-the-simulation.md
+0 −43 docs/faq/what-are-the-units-used-in-the-simulation.md
+0 −22 docs/faq/what-do-i-need-to-know-about-the-numerical-grid.md
+0 −15 docs/faq/why-can-i-not-change-tidy3d-instances-after-they-are-created.md
+0 −9 docs/faq/why-did-my-simulation-finish-early.md
+0 −8 docs/faq/why-does-the-kernel-crash-sometimes-when-using-the-web-based-python-notebook.md
+0 −20 docs/faq/why-is-a-simulation-diverging.md
+1 −1 docs/grid-specification.rst
+1 −1 docs/mediums.rst
+4 −4 docs/simulation-troubleshoot.rst
+3 −3 docs/simulations.rst
+1 −1 docs/sources.rst
+11 −11 faq_categories.json
2 changes: 1 addition & 1 deletion docs/notebooks
Submodule notebooks updated 76 files
+0 −1,023 AbsorbingBoundaryReflection.ipynb
+110 −110 AllDielectricStructuralColor.ipynb
+0 −1,323 AnisotropicMetamaterialBroadbandPBS.ipynb
+0 −689 AntennaCharacteristics.ipynb
+294 −78 AutoGrid.ipynb
+342 −1,238 Autograd10YBranchLevelSet.ipynb
+12 −26 Autograd13Metasurface.ipynb
+9 −20 Autograd15Antenna.ipynb
+66 −111 Autograd16BilayerCoupler.ipynb
+23 −24 Autograd17BandPassFilter.ipynb
+41 −50 Autograd18TopologyBend.ipynb
+97 −127 Autograd19ApodizedCoupler.ipynb
+2 −2 Autograd1Intro.ipynb
+18 −35 Autograd20MetalensWaveguideTaper.ipynb
+3 −11 Autograd21GaPLightExtractor.ipynb
+63 −58 Autograd22PhotonicCrystal.ipynb
+24 −14 Autograd23FabricationAwareInvdes.ipynb
+14 −23 Autograd3InverseDesign.ipynb
+9 −15 Autograd5BoundaryGradients.ipynb
+11 −26 Autograd6GratingCoupler.ipynb
+43 −9 Autograd8WaveguideBend.ipynb
+28,335 −32,880 Autograd9WDM.ipynb
+38 −98 BatchModeSolver.ipynb
+838 −208 BroadbandPlaneWaveWithConstantObliqueIncidentAngle.ipynb
+0 −1,873 ChargeSolver.ipynb
+0 −1,566 DirectionalCouplerSurrogate.ipynb
+0 −3,969 EMEBends.ipynb
+1,029 −2,306 EffectiveIndexApproximation.ipynb
+7 −1 GratingEfficiency.ipynb
+491 −270 HeatSolver.ipynb
+159 −779 IntegratedVivaldiAntenna.ipynb
+2 −2 InverseDesign.ipynb
+1,548 −355 LNOIPolarizationSplitterRotator.ipynb
+0 −663 LayerRefinement.ipynb
+68 −441 LinearLumpedElements.ipynb
+0 −498 MMIMeepBenchmark.ipynb
+1,392 −2,054 MachZehnderModulator.ipynb
+16,303 −308 Metalens.ipynb
+0 −509 ModeOverlap.ipynb
+8 −1 Near2FarSphereRCS.ipynb
+0 −515 PECSphereRCS.ipynb
+9 −1 PlasmonicNanoparticle.ipynb
+0 −696 RadarAbsorbingMetamaterial.ipynb
+103 −290 TaperedWgDispersion.ipynb
+267 −139 ThermallyTunedRingResonator.ipynb
+1 −0 docs/case_studies/metamaterials_gratings_periodic.rst
+1 −2 docs/case_studies/microwave.rst
+0 −1 docs/case_studies/pic.rst
+2 −0 docs/case_studies/scattering_far_field.rst
+1 −2 docs/features/advanced.rst
+0 −1 docs/features/autograd.rst
+0 −14 docs/features/benchmark.rst
+0 −10 docs/features/charge.rst
+0 −1 docs/features/eme.rst
+1 −2 docs/features/grid.rst
+1 −1 docs/features/heat.rst
+0 −3 docs/features/index.rst
+1 −2 docs/features/microwave.rst
+0 −1 docs/features/mode.rst
+1 −2 docs/features/symmetry.rst
+ img/EMEBends.png
+ img/MeepMMIBenchmark.png
+ img/PatchAntenna.png
+ img/RadarAbsorberMetasurface.png
+ img/animation_wdm_autograd.gif
+ img/charge_modulator.png
+0 −87 img/directional_coupler_surrogate.svg
+ img/effective_index_approximation.png
+ img/pbs_swg_am.png
+ img/pbs_swg_am_top.png
+ img/single-cell-lumped-element.png
+0 −50 misc/MeepMMI.txt
+0 −2 misc/import_file_mapping.json
+ misc/inv_des_wdm_ag.gds
+ misc/inv_des_ybranch.gds
+ misc/y_branch_fab.pkl
6 changes: 3 additions & 3 deletions tidy3d/plugins/invdes/design.py
@@ -25,10 +25,10 @@
 class AbstractInverseDesign(InvdesBaseModel, abc.ABC):
     """Container for an inverse design problem."""
 
-    design_region: DesignRegionType = pd.Field(
+    design_regions: list[DesignRegionType] = pd.Field(
         ...,
-        title="Design Region",
-        description="Region within which we will optimize the simulation.",
+        title="Design Regions",
+        description="Regions within which we will optimize the simulation.",
     )
 
     task_name: str = pd.Field(
105 changes: 105 additions & 0 deletions tidy3d/plugins/invdes/optimization_spec.py
@@ -0,0 +1,105 @@
# specification for running the optimizer

import abc

import autograd.numpy as anp
import numpy as np
import pydantic.v1 as pd

from .base import InvdesBaseModel
from .result import InverseDesignResult


class AbstractOptimizationSpec(InvdesBaseModel, abc.ABC):
    """Specification for an optimization."""

    learning_rate: pd.PositiveFloat = pd.Field(
        ...,
        title="Learning Rate",
        description="Step size for the gradient descent optimizer.",
    )

    maximize: bool = pd.Field(
        True,
        title="Direction of Optimization",
        description="If ``True``, the optimizer will maximize the objective function. If ``False``, the optimizer will minimize the objective function.",
    )

    num_steps: pd.PositiveInt = pd.Field(
        ...,
        title="Number of Steps",
        description="Number of steps in the gradient descent optimizer.",
    )

    @abc.abstractmethod
    def initial_state(self, parameters: np.ndarray) -> dict:
        """The initial state of the optimizer."""

    def display_fn(self, result: InverseDesignResult, step_index: int) -> None:
        """Default display function while optimizing."""
        print(f"step ({step_index + 1}/{self.num_steps})")
        print(f"\tobjective_fn_val = {result.objective_fn_val[-1]:.3e}")
        print(f"\tgrad_norm = {anp.linalg.norm(result.grad[-1]):.3e}")
        print(f"\tpost_process_val = {result.post_process_val[-1]:.3e}")
        print(f"\tpenalty = {result.penalty[-1]:.3e}")


class AdamOptimizationSpec(AbstractOptimizationSpec):
    """Specification for an optimization using the Adam method."""

    beta1: float = pd.Field(
        0.9,
        ge=0.0,
        le=1.0,
        title="Beta 1",
        description="Beta 1 parameter in the Adam optimization method.",
    )

    beta2: float = pd.Field(
        0.999,
        ge=0.0,
        le=1.0,
        title="Beta 2",
        description="Beta 2 parameter in the Adam optimization method.",
    )

    eps: pd.PositiveFloat = pd.Field(
        1e-8,
        title="Epsilon",
        description="Epsilon parameter in the Adam optimization method.",
    )

    def initial_state(self, parameters: np.ndarray) -> dict:
        """The initial state of the optimizer."""
        zeros = np.zeros_like(parameters)
        return dict(m=zeros, v=zeros, t=0)

    def update(
        self, parameters: np.ndarray, gradient: np.ndarray, state: dict = None
    ) -> tuple[np.ndarray, dict]:
        """Apply one Adam step and return the updated parameters and state."""
        if state is None:
            state = self.initial_state(parameters)

        # get state
        m = np.array(state["m"])
        v = np.array(state["v"])
        t = int(state["t"])

        # update time step
        t = t + 1

        # update moment variables
        m = self.beta1 * m + (1 - self.beta1) * gradient
        v = self.beta2 * v + (1 - self.beta2) * (gradient**2)

        # compute bias-corrected moment variables
        m_ = m / (1 - self.beta1**t)
        v_ = v / (1 - self.beta2**t)

        # update parameters and state
        parameters -= self.learning_rate * m_ / (np.sqrt(v_) + self.eps)
        state = dict(m=m, v=v, t=t)
        return parameters, state


OptimizationSpecType = AdamOptimizationSpec
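For reference, the `update` method above is the standard Adam step (Kingma and Ba, 2015); in the notation of the code, with gradient $g_t$ at step $t$, the state and parameters evolve as

$$
\begin{aligned}
m_t &= \beta_1 m_{t-1} + (1 - \beta_1)\, g_t, \qquad
v_t = \beta_2 v_{t-1} + (1 - \beta_2)\, g_t^2, \\
\hat{m}_t &= \frac{m_t}{1 - \beta_1^{t}}, \qquad
\hat{v}_t = \frac{v_t}{1 - \beta_2^{t}}, \qquad
\theta_t = \theta_{t-1} - \eta\, \frac{\hat{m}_t}{\sqrt{\hat{v}_t} + \epsilon},
\end{aligned}
$$

where $\eta$ is `learning_rate` and $\epsilon$ is `eps`.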
89 changes: 5 additions & 84 deletions tidy3d/plugins/invdes/optimizer.py
@@ -1,12 +1,10 @@
 # specification for running the optimizer
 
-import abc
 import typing
 from copy import deepcopy
 
 import autograd as ag
 import autograd.numpy as anp
-import numpy as np
 import pydantic.v1 as pd
 
 import tidy3d as td
@@ -17,7 +15,7 @@
 from .result import InverseDesignResult
 
 
-class AbstractOptimizer(InvdesBaseModel, abc.ABC):
+class Optimizer(InvdesBaseModel):
     """Specification for an optimization."""
 
     design: InverseDesignType = pd.Field(
@@ -27,24 +25,6 @@ class AbstractOptimizer(InvdesBaseModel, abc.ABC):
         discriminator=TYPE_TAG_STR,
     )
 
-    learning_rate: pd.PositiveFloat = pd.Field(
-        ...,
-        title="Learning Rate",
-        description="Step size for the gradient descent optimizer.",
-    )
-
-    maximize: bool = pd.Field(
-        True,
-        title="Direction of Optimization",
-        description="If ``True``, the optimizer will maximize the objective function. If ``False``, the optimizer will minimize the objective function.",
-    )
-
-    num_steps: pd.PositiveInt = pd.Field(
-        ...,
-        title="Number of Steps",
-        description="Number of steps in the gradient descent optimizer.",
-    )
-
     results_cache_fname: str = pd.Field(
         None,
         title="History Storage File",
@@ -67,10 +47,6 @@
         "last computed state of these variables.",
     )
 
-    @abc.abstractmethod
-    def initial_state(self, parameters: np.ndarray) -> dict:
-        """The initial state of the optimizer."""
-
     def validate_pre_upload(self) -> None:
         """Validate the fully initialized optimizer is ok for upload to our servers."""
         self.design.simulation.validate_pre_upload()
@@ -197,7 +173,10 @@ def continue_run(
             post_process_val = aux_data["post_process_val"]
 
             # update optimizer and parameters
-            params, opt_state = self.update(parameters=params, state=opt_state, gradient=-grad)
+            # note: would need to update every region in the list here
+            params, opt_state = self.design.region.update(
+                parameters=params, state=opt_state, gradient=-grad
+            )
 
             # cap the parameters
             params = anp.clip(params, a_min=0.0, a_max=1.0)
@@ -255,61 +234,3 @@ def continue_run_from_history(
             post_process_fn=post_process_fn,
             callback=callback,
         )
-
-
-class AdamOptimizer(AbstractOptimizer):
-    """Specification for an optimization."""
-
-    beta1: float = pd.Field(
-        0.9,
-        ge=0.0,
-        le=1.0,
-        title="Beta 1",
-        description="Beta 1 parameter in the Adam optimization method.",
-    )
-
-    beta2: float = pd.Field(
-        0.999,
-        ge=0.0,
-        le=1.0,
-        title="Beta 2",
-        description="Beta 2 parameter in the Adam optimization method.",
-    )
-
-    eps: pd.PositiveFloat = pd.Field(
-        1e-8,
-        title="Epsilon",
-        description="Epsilon parameter in the Adam optimization method.",
-    )
-
-    def initial_state(self, parameters: np.ndarray) -> dict:
-        """initial state of the optimizer"""
-        zeros = np.zeros_like(parameters)
-        return dict(m=zeros, v=zeros, t=0)
-
-    def update(
-        self, parameters: np.ndarray, gradient: np.ndarray, state: dict = None
-    ) -> tuple[np.ndarray, dict]:
-        if state is None:
-            state = self.initial_state(parameters)
-
-        # get state
-        m = np.array(state["m"])
-        v = np.array(state["v"])
-        t = int(state["t"])
-
-        # update time step
-        t = t + 1
-
-        # update moment variables
-        m = self.beta1 * m + (1 - self.beta1) * gradient
-        v = self.beta2 * v + (1 - self.beta2) * (gradient**2)
-
-        # compute bias-corrected moment variables
-        m_ = m / (1 - self.beta1**t)
-        v_ = v / (1 - self.beta2**t)
-
-        # update parameters and state
-        parameters -= self.learning_rate * m_ / (np.sqrt(v_) + self.eps)
-        state = dict(m=m, v=v, t=t)
-        return parameters, state
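The "would need to update every region in the list" note in `continue_run` above is left unresolved in this draft. Below is a hypothetical sketch of what that per-region loop might look like, using the `optimization_spec` field added to `DesignRegion` in the next file; splitting the flat parameter array into one array per region is assumed and not part of this PR.

```python
# Hypothetical sketch: `params_list`, `grads_list`, and `states_list` are assumed
# to hold one entry per design region; the splitting helper is not shown in this PR.
import numpy as np


def update_all_regions(design, params_list, grads_list, states_list):
    """Let each design region apply its own `optimization_spec` to its parameters."""
    new_params, new_states = [], []
    for region, params, grad, state in zip(
        design.design_regions, params_list, grads_list, states_list
    ):
        params, state = region.optimization_spec.update(
            parameters=np.asarray(params),
            gradient=-np.asarray(grad),  # gradient negated as in `continue_run` above
            state=state,
        )
        new_params.append(params)
        new_states.append(state)
    return new_params, new_states
```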
6 changes: 6 additions & 0 deletions tidy3d/plugins/invdes/region.py
@@ -15,6 +15,7 @@
 
 from .base import InvdesBaseModel
 from .initialization import InitializationSpecType, UniformInitializationSpec
+from .optimization_spec import OptimizationSpecType
 from .penalty import PenaltyType
 from .transformation import TransformationType
 
@@ -70,6 +71,11 @@ class DesignRegion(InvdesBaseModel, abc.ABC):
         discriminator=TYPE_TAG_STR,
     )
 
+    optimization_spec: OptimizationSpecType = pd.Field(
+        title="Optimization Spec",
+        description="Specifies how this design region will be optimized by the ``Optimizer``.",
+    )
+
     def _post_init_validators(self):
         """Automatically call any `_validate_XXX` method."""
         for attr_name in dir(self):