Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add label to input layers and more efficient circuit construction #347

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion cirkit/symbolic/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def _default_product_label(sl: ProductLayer) -> str:
)

def _default_input_label(sl: InputLayer) -> str:
return " ".join(map(str, sl.scope))
return " ".join(map(str, sl.scope)) if sl.label is None else sl.label

if product_label is None:
product_label = _default_product_label
Expand Down
87 changes: 68 additions & 19 deletions cirkit/symbolic/layers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from abc import ABC, abstractmethod
from collections.abc import Mapping
from enum import IntEnum, auto
from typing import Any, cast
from typing import Any, Callable, cast

from cirkit.symbolic.initializers import NormalInitializer
from cirkit.symbolic.parameters import (
Expand All @@ -12,6 +12,7 @@
SoftmaxParameter,
TensorParameter,
)
from cirkit.utils.label import LayerLabel
from cirkit.utils.scope import Scope


Expand Down Expand Up @@ -44,6 +45,7 @@ def __init__(
num_input_units: int,
num_output_units: int,
arity: int = 1,
label: LayerLabel | None = None,
):
"""Initializes a symbolic layer.

Expand All @@ -52,6 +54,7 @@ def __init__(
num_output_units: The number of output units, i.e., the number of computational units
in this layer.
arity: The arity of the layer, i.e., the number of input layers to this layer.
label: The label of this layer.

Raises:
ValueError: If the number of input units, output units or the arity are not positive.
Expand All @@ -65,6 +68,16 @@ def __init__(
self.num_input_units = num_input_units
self.num_output_units = num_output_units
self.arity = arity
self._label = label

@property
def label(self) -> LayerLabel | None:
"""Retrieves the label of the layer.

Returns:
LayerLabel | None: The label of the layer or None, if it is not set.
"""
return self._label

@property
@abstractmethod
Expand Down Expand Up @@ -108,6 +121,7 @@ def __repr__(self) -> str:
f"num_input_units={self.num_input_units}, "
f"num_output_units={self.num_output_units}, "
f"arity={self.arity}, "
f"label={self._label},"
f"config=({config_repr}), "
f"params=({params_repr})"
)
Expand All @@ -116,19 +130,20 @@ def __repr__(self) -> str:
class InputLayer(Layer, ABC):
"""The symbolic input layer class."""

def __init__(self, scope: Scope, num_output_units: int):
def __init__(self, scope: Scope, num_output_units: int, label: LayerLabel | None = None):
"""Initializes a symbolic input layer.

Args:
scope: The variables scope of the layer.
num_output_units: The number of input units in the layer.
label: The label of this layer.

Raises:
ValueError: If the number of outputs is not positive.
"""
if num_output_units <= 0:
raise ValueError("The number of output units should be positive")
super().__init__(len(scope), num_output_units)
super().__init__(len(scope), num_output_units, label=label)
self.scope = scope

@property
Expand Down Expand Up @@ -156,13 +171,14 @@ def __repr__(self) -> str:
class ConstantLayer(InputLayer, ABC):
"""The symbolic layer computing a constant vector, i.e., it does not depend on any variable."""

def __init__(self, num_output_units: int):
def __init__(self, num_output_units: int, label: LayerLabel | None = None):
"""Initializes a symbolic constant layer.

Args:
num_output_units: The number of input units in the layer.
label: The label of this input layer.
"""
super().__init__(Scope([]), num_output_units)
super().__init__(Scope([]), num_output_units, label=label)


class EvidenceLayer(ConstantLayer):
Expand Down Expand Up @@ -214,6 +230,7 @@ def __init__(
scope: Scope,
num_output_units: int,
*,
label: LayerLabel | None = None,
num_states: int = 2,
weight: Parameter | None = None,
weight_factory: ParameterFactory | None = None,
Expand Down Expand Up @@ -260,6 +277,7 @@ def config(self) -> Mapping[str, Any]:
"scope": self.scope,
"num_output_units": self.num_output_units,
"num_states": self.num_states,
"label": self.label,
}

@property
Expand All @@ -277,6 +295,7 @@ def __init__(
scope: Scope,
num_output_units: int,
*,
label: LayerLabel | None = None,
num_categories: int,
logits: Parameter | None = None,
probs: Parameter | None = None,
Expand Down Expand Up @@ -310,7 +329,7 @@ def __init__(
)
if num_categories < 2:
raise ValueError("At least two categories must be specified")
super().__init__(scope, num_output_units)
super().__init__(scope, num_output_units, label=label)
self.num_categories = num_categories
if logits is None and probs is None:
if logits_factory is not None:
Expand Down Expand Up @@ -343,6 +362,7 @@ def config(self) -> Mapping[str, Any]:
"scope": self.scope,
"num_output_units": self.num_output_units,
"num_categories": self.num_categories,
"label": self.label,
}

@property
Expand All @@ -362,6 +382,7 @@ def __init__(
scope: Scope,
num_output_units: int,
*,
label: LayerLabel | None = None,
total_count: int = 2,
logits: Parameter | None = None,
probs: Parameter | None = None,
Expand Down Expand Up @@ -426,6 +447,7 @@ def config(self) -> dict:
"scope": self.scope,
"num_output_units": self.num_output_units,
"total_count": self.total_count,
"label": self.label,
}

@property
Expand All @@ -445,6 +467,7 @@ def __init__(
scope: Scope,
num_output_units: int,
*,
label: LayerLabel | None = None,
mean: Parameter | None = None,
stddev: Parameter | None = None,
log_partition: Parameter | None = None,
Expand Down Expand Up @@ -533,6 +556,7 @@ def __init__(
scope: Scope,
num_output_units: int,
*,
label: LayerLabel | None = None,
degree: int,
coeff: Parameter | None = None,
coeff_factory: ParameterFactory | None = None,
Expand Down Expand Up @@ -576,6 +600,7 @@ def config(self) -> Mapping[str, Any]:
"scope": self.scope,
"num_output_units": self.num_output_units,
"degree": self.degree,
"label": self.label,
}

@property
Expand All @@ -586,13 +611,21 @@ def params(self) -> Mapping[str, Parameter]:
class ConstantValueLayer(ConstantLayer):
"""A symbolic layer computing a constant function encoded by a parameter."""

def __init__(self, num_output_units: int, *, log_space: bool = False, value: Parameter):
def __init__(
self,
num_output_units: int,
*,
log_space: bool = False,
value: Parameter,
label: LayerLabel | None = None,
):
"""Initializes a constant value layer.

Args:
num_output_units: The number of output log partition functions.
log_space: Whether the given value is in the log-space, i.e., this constant
layer should encode ```exp(value)``` rather than ```value```.
label: The label of this input layer.
value: The symbolic parameter representing the encoded value.
This symbolic parameter should have shape (K,), where K is the number of
output units.
Expand All @@ -609,7 +642,11 @@ def _value_shape(self) -> tuple[int, ...]:

@property
def config(self) -> Mapping[str, Any]:
return {"num_output_units": self.num_output_units, "log_space": self.log_space}
return {
"num_output_units": self.num_output_units,
"log_space": self.log_space,
"label": self.label,
}

@property
def params(self) -> Mapping[str, Parameter]:
Expand All @@ -619,42 +656,51 @@ def params(self) -> Mapping[str, Parameter]:
class ProductLayer(Layer, ABC):
"""The abstract base class for symbolic product layers."""

def __init__(self, num_input_units: int, num_output_units: int, arity: int = 2):
def __init__(
self,
num_input_units: int,
num_output_units: int,
arity: int = 2,
*,
label: LayerLabel | None = None,
):
"""Initializes a product layer.

Args:
num_input_units: The number of units in each input layer.
num_output_units: The number of product units in the product layer.
arity: The arity of the layer, i.e., the number of input layers to the product layer.
label: The label of this layer.

Raises:
ValueError: If the arity is less than two.
"""
if arity < 2:
raise ValueError("The arity should be at least 2")
super().__init__(num_input_units, num_output_units, arity)
super().__init__(num_input_units, num_output_units, arity, label=label)


class HadamardLayer(ProductLayer):
"""The symbolic element-wise product (or Hadamard) layer. This layer computes the element-wise
product of the vectors given in output by some input layers. Therefore, the number of product
units in the layer is equal to the number of units in each input layer."""

def __init__(self, num_input_units: int, arity: int = 2):
def __init__(self, num_input_units: int, arity: int = 2, *, label: LayerLabel | None = None):
"""Initializes a Hadamard product layer.

Args:
num_input_units: The number of units in each input layer.
arity: The arity of the layer, i.e., the number of input layers to the product layer.
label: The label of this layer.

Raises:
ValueError: If the arity is less than two.
"""
super().__init__(num_input_units, num_input_units, arity=arity)
super().__init__(num_input_units, num_input_units, arity=arity, label=label)

@property
def config(self) -> Mapping[str, Any]:
return {"num_input_units": self.num_input_units, "arity": self.arity}
return {"num_input_units": self.num_input_units, "arity": self.arity, "label": self.label}


class KroneckerLayer(ProductLayer):
Expand All @@ -663,27 +709,26 @@ class KroneckerLayer(ProductLayer):
units in the layer is equal to the product of the number of units in each input layer.
Note that the output of a Kronecker layer is a vector."""

def __init__(self, num_input_units: int, arity: int = 2):
def __init__(self, num_input_units: int, arity: int = 2, *, label: LayerLabel | None = None):
"""Initializes a Kronecker product layer.

Args:
num_input_units: The number of units in each input layer.
arity: The arity of the layer, i.e., the number of input layers to the product layer.
label: The label of this layer.

Raises:
ValueError: If the arity is less than two.
"""
if arity < 2:
raise ValueError("The arity should be at least 2")
super().__init__(
num_input_units,
cast(int, num_input_units**arity),
arity=arity,
num_input_units, cast(int, num_input_units**arity), arity=arity, label=label
)

@property
def config(self) -> Mapping[str, Any]:
return {"num_input_units": self.num_input_units, "arity": self.arity}
return {"num_input_units": self.num_input_units, "arity": self.arity, "label": self.label}


class SumLayer(Layer):
Expand All @@ -709,6 +754,8 @@ def __init__(
arity: int = 1,
weight: Parameter | None = None,
weight_factory: ParameterFactory | None = None,
*,
label: LayerLabel | None = None,
):
r"""Initializes a dense layer.

Expand All @@ -723,8 +770,9 @@ def __init__(
if the given weight is None. If this factory is also None, then a weight
parameter with [NormalInitializer][cirkit.symbolic.initializers.NormalInitializer]
as initializer will be instantiated.
label: The label of this layer.
"""
super().__init__(num_input_units, num_output_units, arity=arity)
super().__init__(num_input_units, num_output_units, arity=arity, label=label)
if weight is None:
if weight_factory is None:
weight = Parameter.from_input(
Expand All @@ -746,6 +794,7 @@ def config(self) -> Mapping[str, Any]:
"num_input_units": self.num_input_units,
"num_output_units": self.num_output_units,
"arity": self.arity,
"label": self.label,
}

@property
Expand Down
Loading
Loading