Notebook for adding input layers and operators #275

Open: wants to merge 3 commits into base: main.
10 changes: 8 additions & 2 deletions cirkit/backend/torch/parameters/nodes.py
@@ -811,10 +811,16 @@ def __init__(self, in_shape: tuple[int, ...], *, num_folds: int = 1, order: int
     def shape(self) -> tuple[int, ...]:
         # if dp1>order, i.e., deg>=order, then diff, else const 0.
         return (
-            self.in_shapes[0][0],
-            self.in_shapes[0][1] - self.order if self.in_shapes[0][1] > self.order else 1,
+            self.in_shapes[0][0],  # dim Ko
+            self.in_shapes[0][1] - self.order
+            if self.in_shapes[0][1] > self.order
+            else 1,  # dim dp1
         )
 
+    @property
+    def config(self) -> dict[str, Any]:
+        return {**super().config, "order": self.order}
+
     @classmethod
     def _diff_once(cls, x: Tensor) -> Tensor:
         degp1 = x.shape[-1]  # x shape (F, K, dp1).
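For readers following the shape logic: each application of `_diff_once` (its body is collapsed above) removes one degree from the coefficient tensor of shape (F, K, dp1), and once the order reaches the degree the result collapses to the constant-zero polynomial kept as a single coefficient, which is what the new `shape` property encodes. A minimal sketch of one differentiation step, assuming coefficients are stored lowest-degree-first; this uses plain torch ops and is not the library's actual implementation:

```python
import torch
from torch import Tensor


def diff_once_sketch(x: Tensor) -> Tensor:
    """One derivative of polynomial coefficients along the last dim.

    x has shape (F, K, dp1) and stores c_0, ..., c_deg, i.e. p(y) = sum_k c_k * y^k.
    The derivative has coefficients k * c_k shifted down one degree, so the
    result has shape (F, K, dp1 - 1); a constant input yields the zero polynomial.
    """
    degp1 = x.shape[-1]
    if degp1 == 1:  # derivative of a constant: the constant 0 polynomial
        return torch.zeros_like(x)
    ks = torch.arange(1, degp1, dtype=x.dtype, device=x.device)
    return x[..., 1:] * ks  # drop c_0, scale each c_k by its degree k


# p(y) = 1 + 2y + 3y^2  ->  p'(y) = 2 + 6y
coeffs = torch.tensor([[[1.0, 2.0, 3.0]]])  # shape (1, 1, 3)
print(diff_once_sketch(coeffs))  # tensor([[[2., 6.]]]), shape (1, 1, 2)
```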
2 changes: 1 addition & 1 deletion cirkit/backend/torch/semiring.py
@@ -2,7 +2,7 @@
 from abc import ABC, abstractmethod
 from collections.abc import Callable, Iterable, Sequence
 from typing import ClassVar, TypeVar, cast
-from typing_extensions import TypeVarTuple, Unpack, final
+from typing_extensions import TypeVarTuple, Unpack, final  # FUTURE: in typing from 3.11
 
 import torch
 from torch import Tensor
17 changes: 10 additions & 7 deletions cirkit/symbolic/functional.py
@@ -306,19 +306,19 @@ def differentiate(
     if order <= 0:
         raise ValueError("The order of differentiation must be positive.")
 
-    # Use the registry in the current context, if not specified otherwise
+    # Use the registry in the current context, if not specified otherwise.
     if registry is None:
         registry = OPERATOR_REGISTRY.get()
 
-    # Mapping the symbolic circuit layers with blocks of circuit layers
+    # Mapping the symbolic circuit layers with blocks of circuit layers.
     layers_to_blocks: dict[Layer, list[CircuitBlock]] = {}
 
-    # For each new circuit block, keep track of its inputs
+    # For each new circuit block, keep track of its inputs.
     in_blocks: dict[CircuitBlock, Sequence[CircuitBlock]] = {}
 
     for sl in sc.topological_ordering():
         # "diff_blocks: List[CircuitBlock]" is the diff of sl wrt each variable and channel in order
-        # and then at the end we append a copy of sl
+        # and then at the end we append a copy of sl.
         sl_params = {name: p.ref() for name, p in sl.params.items()}
 
         if isinstance(sl, InputLayer):
@@ -348,6 +348,7 @@ def differentiate(
             # The layers are the same for all diffs of a SumLayer. We retrieve (num_vars * num_chs)
             # from the length of one input blocks.
             var_ch = len(layers_to_blocks[sc.layer_inputs(sl)[0]][:-1])
+            # TODO: make a shortcut for the copy idiom?
            diff_blocks = [
                 CircuitBlock.from_layer(type(sl)(**sl.config, **sl_params)) for _ in range(var_ch)
             ]
@@ -433,13 +434,15 @@ def differentiate(
         # Save all the blocks including a copy of sl at [-1] as the diff layers of sl.
         layers_to_blocks[sl] = diff_blocks
 
-    # Construct the integral symbolic circuit and set the integration operation metadata
+    # Construct the differential symbolic circuit and set the differentiation operation metadata.
     return Circuit.from_operation(
         sc.scope,
         sc.num_channels,
-        sum(layers_to_blocks.values(), []),
+        list(itertools.chain.from_iterable(layers_to_blocks.values())),
         in_blocks,  # TODO: in_blocks uses Sequence, and Sequence should work.
-        sum((layers_to_blocks[sl] for sl in sc.outputs), []),
+        itertools.chain.from_iterable(
+            layers_to_blocks[sl] for sl in sc.outputs
+        ),  # TODO: Iterable should work
         operation=CircuitOperation(
             operator=CircuitOperator.DIFFERENTIATION,
             operands=(sc,),
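On the `sum(..., [])` to `itertools.chain.from_iterable(...)` change: both flatten a collection of block lists into a single sequence, but summing lists re-copies the accumulated result at every step (quadratic), while `chain.from_iterable` walks each inner list exactly once. The first call site wraps the chain in `list(...)`, while the second is passed as a lazy iterable, per the TODO comments. A small illustration with made-up values:

```python
import itertools

blocks_per_layer = {"layer_a": [1, 2], "layer_b": [3], "layer_c": [4, 5, 6]}

# Quadratic-time flattening: every + builds a brand-new list.
flat_sum = sum(blocks_per_layer.values(), [])

# Linear-time flattening: chain lazily walks each inner list once.
flat_chain = list(itertools.chain.from_iterable(blocks_per_layer.values()))

assert flat_sum == flat_chain == [1, 2, 3, 4, 5, 6]
```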
44 changes: 35 additions & 9 deletions cirkit/symbolic/layers.py
@@ -2,7 +2,7 @@
 from enum import IntEnum, auto
 from typing import Any, cast
 
-from cirkit.symbolic.initializers import NormalInitializer
+from cirkit.symbolic.initializers import Initializer, NormalInitializer
 from cirkit.symbolic.parameters import (
     Parameter,
     ParameterFactory,
@@ -86,6 +86,37 @@
         """
         return {}
 
+    # TODO: apply to other layers
+    @staticmethod
+    def _make_param(
+        param: Parameter | None,
+        param_factory: ParameterFactory | None,
+        shape: tuple[int, ...],
+        default_factory: ParameterFactory = lambda shape: Parameter.from_leaf(
+            TensorParameter(*shape, initializer=NormalInitializer())
+        ),
+    ) -> Parameter:
+        """Make a parameter from the optional parameter object or factory or default.
+
+        Args:
+            param (Optional[Parameter]): The optional parameter provided to a layer.
+            param_factory (Optional[ParameterFactory]): The optional parameter factory provided to
+                a layer.
+            shape (Tuple[int, ...]): The shape of the parameter.
+            default_factory (ParameterFactory, optional): The factory to use when falling back to
+                default. Defaults to a new leaf TensorParameter with NormalInitializer.
+
+        Returns:
+            Parameter: The parameter.
+        """
+        if param is not None:
+            return param
+
+        if param_factory is not None:
+            return param_factory(shape)
+
+        return default_factory(shape)
+
 
 class InputLayer(Layer):
     """The symbolic input layer class."""
@@ -318,13 +349,7 @@
             raise ValueError("The Polynomial layer encodes a univariate distribution")
         super().__init__(scope, num_output_units, num_channels)
         self.degree = degree
-        if coeff is None:
-            if coeff_factory is None:
-                coeff = Parameter.from_leaf(
-                    TensorParameter(*self._coeff_shape, initializer=NormalInitializer())
-                )
-            else:
-                coeff = coeff_factory(self._coeff_shape)
+        coeff = self._make_param(coeff, coeff_factory, self._coeff_shape)
         if coeff.shape != self._coeff_shape:
             raise ValueError(f"Expected parameter shape {self._coeff_shape}, found {coeff.shape}")
         self.coeff = coeff
@@ -334,7 +359,8 @@
         return self.num_output_units, self.degree + 1
 
     @property
-    def config(self) -> dict:
+    def config(self) -> dict[str, Any]:
+        # FUTURE: use | operator in 3.9
         return {**super().config, "degree": self.degree}
 
     @property
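On the new `Layer._make_param` helper: it centralizes the param / param_factory / default fallback that `PolynomialLayer.__init__` previously inlined, so other layers can reuse it (see the TODO). A rough sketch of the three code paths; the import locations follow the diff above, but `my_factory` and the snippet as a whole are illustrative rather than taken from the codebase:

```python
from cirkit.symbolic.initializers import NormalInitializer
from cirkit.symbolic.layers import Layer
from cirkit.symbolic.parameters import Parameter, TensorParameter

shape = (4, 3)  # e.g. (num_output_units, degree + 1) for a PolynomialLayer

# 1. An explicit Parameter is returned unchanged.
explicit = Parameter.from_leaf(TensorParameter(*shape, initializer=NormalInitializer()))
assert Layer._make_param(explicit, None, shape) is explicit

# 2. Otherwise a user-supplied ParameterFactory is called with the shape.
def my_factory(s):  # hypothetical factory, not part of cirkit
    return Parameter.from_leaf(TensorParameter(*s, initializer=NormalInitializer()))

from_factory = Layer._make_param(None, my_factory, shape)

# 3. With neither, the default factory builds a NormalInitializer-backed leaf.
from_default = Layer._make_param(None, None, shape)
```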
3 changes: 1 addition & 2 deletions cirkit/symbolic/operators.py
@@ -177,9 +177,8 @@ def multiply_polynomial_layers(sl1: PolynomialLayer, sl2: PolynomialLayer) -> Ci
             f"but found '{sl1.num_channels}' and '{sl2.num_channels}'"
         )
 
-    shape1, shape2 = sl1.coeff.shape, sl2.coeff.shape
     coeff = Parameter.from_binary(
-        PolynomialProduct(shape1, shape2),
+        PolynomialProduct(sl1.coeff.shape, sl2.coeff.shape),
         sl1.coeff.ref(),
         sl2.coeff.ref(),
     )
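Background for the `PolynomialProduct(sl1.coeff.shape, sl2.coeff.shape)` call: the product of univariate polynomials of degrees d1 and d2 has degree d1 + d2, so its coefficient vector has length (d1 + 1) + (d2 + 1) - 1, which can only be determined from both input shapes. A plain-Python sketch of the underlying coefficient convolution (illustrative only, not the cirkit implementation):

```python
def poly_product_coeffs(c1, c2):
    """Multiply two coefficient lists (lowest degree first) by discrete convolution."""
    out = [0.0] * (len(c1) + len(c2) - 1)
    for i, a in enumerate(c1):
        for j, b in enumerate(c2):
            out[i + j] += a * b
    return out


# (1 + 2y) * (3 + y^2) = 3 + 6y + y^2 + 2y^3
print(poly_product_coeffs([1.0, 2.0], [3.0, 0.0, 1.0]))  # [3.0, 6.0, 1.0, 2.0]
# Output length 4 = 2 + 3 - 1, i.e. degree 3 = 1 + 2.
```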
10 changes: 8 additions & 2 deletions cirkit/symbolic/parameters.py
@@ -572,6 +572,12 @@
     def shape(self) -> tuple[int, ...]:
         # if dp1>order, i.e., deg>=order, then diff, else const 0.
         return (
-            self.in_shapes[0][0],
-            self.in_shapes[0][1] - self.order if self.in_shapes[0][1] > self.order else 1,
+            self.in_shapes[0][0],  # dim Ko
+            self.in_shapes[0][1] - self.order
+            if self.in_shapes[0][1] > self.order
+            else 1,  # dim dp1
         )
+
+    @property
+    def config(self) -> dict[str, Any]:
+        return {**super().config, "order": self.order}
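A quick sanity check of the shape rule above, written as a throwaway helper rather than library code: for `in_shapes[0] = (Ko, dp1)`, differentiating with order < dp1 leaves dp1 - order coefficients, and any larger order collapses the polynomial to the single coefficient of the constant 0.

```python
def diff_output_shape(in_shape, order):
    """Sketch of the rule encoded by the shape property above."""
    ko, dp1 = in_shape
    return (ko, dp1 - order if dp1 > order else 1)


assert diff_output_shape((5, 4), 2) == (5, 2)  # degree 3, 2nd derivative -> degree 1
assert diff_output_shape((5, 4), 7) == (5, 1)  # over-differentiated -> constant 0
```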
