# PDEs with Dependent Variables on Heterogeneous Domains

A differential equation is said to have heterogeneous domains when its dependent variables depend on different independent variables:

$$u(x) + w(x, v) = \frac{\partial w(x, v)}{\partial v}$$

Here, we write an arbitrary heterogeneous system:

using NeuralPDE, Lux, ModelingToolkit, Optimization, OptimizationOptimJL
import ModelingToolkit: Interval

@parameters x y
@variables p(..) q(..) r(..) s(..)
Dx = Differential(x)
Dy = Differential(y)

# 2D PDE: each dependent variable has its own (heterogeneous) argument
# signature — p(x), q(y), r(x, y), s(y, x).
eq = p(x) + q(y) + Dx(r(x, y)) + Dy(s(y, x)) ~ 0

# Boundary conditions. Float32 literals must be written `0.0f0` (or `0f0`);
# the `0.f0` form is a Julia syntax error.
# NOTE(review): several conditions are imposed at -1, which lies outside the
# declared domains [0, 1] below — confirm this is intentional.
bcs = [p(1) ~ 0.0f0, q(-1) ~ 0.0f0,
    r(x, -1) ~ 0.0f0, r(1, y) ~ 0.0f0,
    s(y, 1) ~ 0.0f0, s(-1, x) ~ 0.0f0]

# Domains of the two independent variables.
domains = [x ∈ Interval(0.0, 1.0),
    y ∈ Interval(0.0, 1.0)]

# One network per dependent variable: p and q take a single input (x or y),
# r and s take two inputs, hence the 1-input and 2-input chain groups.
numhid = 3
chains = [[Lux.Chain(Dense(1, numhid, Lux.σ), Dense(numhid, numhid, Lux.σ),
              Dense(numhid, 1)) for i in 1:2];
          [Lux.Chain(Dense(2, numhid, Lux.σ), Dense(numhid, numhid, Lux.σ),
              Dense(numhid, 1)) for i in 1:2]]

# NOTE(review): `prob` and `callback` are used here but never defined in this
# excerpt — the @named PDESystem / PhysicsInformedNN discretization step
# appears to be missing from this chunk of the document.
res = Optimization.solve(prob, BFGS(); callback = callback, maxiters = 100)
u: ComponentVector{Float64}(depvar = (p = (layer_1 = (weight = [0.08396608584093485; 0.3884493496126974; 0.4788906693212563;;], bias = [0.030132275318143355; 0.06878948126950168; -0.1221140254986935;;]), layer_2 = (weight = [-0.09229247563831325 -0.9018794107662705 -0.76808730871007; 0.663119171057118 -0.12537286824906577 -0.12551030047334938; -0.8951814275067956 -1.0587303886367196 0.03190882177182445], bias = [-0.07281527785310428; -0.08134502513480135; -0.04855538337315815;;]), layer_3 = (weight = [0.2817037698351474 0.5048928172246093 -0.5245585492476995], bias = [-0.21033409478341258;;])), q = (layer_1 = (weight = [-0.2989922986237868; -0.9483757939495134; -1.0240076517237802;;], bias = [-0.03306595155728904; 0.07317402446076242; -0.03225767253716194;;]), layer_2 = (weight = [-0.11071017828075333 0.5364901522854089 -0.9583909656713653; 0.8263335674598872 0.07790311434235422 0.7391327568809866; 0.342963282125443 -0.7361479179374988 0.31447051815180754], bias = [-0.08430066443198476; -0.048142005511894584; -0.234090118127267;;]), layer_3 = (weight = [0.011773916453892306 0.2029821840665521 0.6980620921605808], bias = [-0.4418463132483842;;])), r = (layer_1 = (weight = [-0.5326505639154343 0.5696760948439659; -0.24572778767320202 0.19121479177688244; 0.8945962771255236 -0.05928307304909608], bias = [0.1260732601214293; -0.10079763571464039; -0.011012113614491862;;]), layer_2 = (weight = [0.31875019456437664 0.5106251637117236 0.5910843721870839; -0.24172347653927126 -0.25337664481914557 0.7977554982299423; -0.14933372682788532 0.24238492051204977 0.018278676031397722], bias = [0.03321496024008093; -0.019949719658745677; -0.0019456326309746056;;]), layer_3 = (weight = [0.004977608825987084 -0.002619926050430597 0.044520407889909644], bias = [-0.024723135750711084;;])), s = (layer_1 = (weight = [0.5505640849363819 0.2875270814932817; 0.7819047893665848 -0.27995164386791127; 0.5671466076351345 0.1884799420416614], bias = [0.0019621614652133587; 0.004822188730490967; 
-0.027233092452912014;;]), layer_2 = (weight = [-0.9330099623958164 0.8709647083371942 -0.17978803551506678; 0.03583082625467596 -0.6252817757149369 0.5963060977283502; -0.4016350913786598 -0.06598016977798878 0.4426788232258018], bias = [-0.02646127795060666; -0.02689419546833513; -0.07708982393127241;;]), layer_3 = (weight = [0.11216129817166617 0.16056329975258302 -0.7507110053171575], bias = [0.22694396183848964;;]))))