Tensorflow keras layer #9707
Changes from all commits
@@ -15,8 +15,52 @@ tensorflow.DType.__getattr__
tensorflow.Graph.__getattr__
tensorflow.Operation.__getattr__
tensorflow.Variable.__getattr__
tensorflow.keras.layers.Layer.__getattr__
# Internal undocumented API
tensorflow.RaggedTensor.__init__
# Has an undocumented extra argument that tf.Variable (which acts like a subclass
# by dynamically patching tf.Tensor methods) does not preserve.
tensorflow.Tensor.__getitem__
# stub internal utility
tensorflow._aliases

# Tensorflow imports are cursed.
# import tensorflow.initializers
# import tensorflow as tf
# tf.initializers
# Usually these two ways give the same module, but for tensorflow the first way
# often does not work while the second way does. The documentation describes
# tf.initializers as a module and it has that type when accessed the second way,
# but the real module file has a completely different name (even a different package) and is dynamically handled.
# tf.initializers at runtime is <module 'keras.api._v2.keras.initializers' from '...'>
tensorflow.initializers

# Layer constructors always have **kwargs, but only allow a few specific values. PEP 692
# would allow us to specify this with **kwargs and remove the need for these exceptions.
tensorflow.keras.layers.*.__init__

Review thread on the PEP 692 note:

I don't think we have a PEP 692 tracker ticket yet. Will make one later if no one covers it first.

I think PEP 692 might already have sufficient support to use it; at least mypy and pyright support it. Haven't checked pytype though.

Testing basic PEP 692 support:

    from typing import TypedDict
    from typing_extensions import Unpack

    class Foo(TypedDict, total=False):
        a: int
        b: str

    def f(**kwargs: Unpack[Foo]) -> None:
        ...

    f(a=1, b="2")       # OK
    f(a=1, b=2)         # Error: b has type str
    f(a=1, b="2", c=3)  # Error: unexpected keyword argument "c"

I'll open a ticket to track.

# __call__ in tensorflow classes often allows keyword usage, but
# when you subclass those classes it is not expected to handle the keyword case. As an example,
# class MyLayer(tf.keras.layers.Layer):
#     def call(self, x):
#         ...
# is common even though Layer.call is defined like def call(self, inputs). Treating inputs as
# a keyword argument would lead to many false positives with typical subclass usage.
# An additional awkwardness for Layers is that call may optionally have training/mask as keyword arguments, and some
# layers do while others do not. At runtime call is not intended to be used directly by users,
# but instead through __call__, which extracts the training/mask arguments. Trying to describe
# this better in stubs would similarly add many false positive Liskov violations.
tensorflow.keras.layers.*.call
tensorflow.keras.regularizers.Regularizer.__call__
tensorflow.keras.constraints.Constraint.__call__

# The Layer class does a good deal of __new__ magic and actually returns one of two different internal
# types depending on tensorflow execution mode. This feels like an implementation internal.
tensorflow.keras.layers.Layer.__new__

# build/compute_output_shape are marked positional-only in stubs
# as the argument name is inconsistent across layers and looks like
# an implementation detail, as the documentation never mentions the
# disagreements.
tensorflow.keras.layers.*.build
tensorflow.keras.layers.*.compute_output_shape
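To make the call/__call__ note above concrete, here is a minimal sketch of the typical subclassing pattern the allowlist comment describes; the Scale layer and its names are hypothetical and not part of the PR:

    import tensorflow as tf

    class Scale(tf.keras.layers.Layer):
        """A typical user-defined layer: call() takes an argument named x,
        not the base class's inputs, and omits training/mask entirely."""

        def __init__(self, factor: float) -> None:
            super().__init__()
            self.factor = factor

        def call(self, x: tf.Tensor) -> tf.Tensor:
            return x * self.factor

    layer = Scale(2.0)
    # Users go through __call__, which forwards to call(); nobody writes
    # layer.call(inputs=...), so making inputs keyword-usable in the stub
    # would only flag ordinary subclasses like this one.
    y = layer(tf.constant([1.0, 2.0]))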
@@ -0,0 +1,14 @@
# Commonly used type aliases.
# Everything in this module is private for stubs. There is no runtime
# equivalent.

from collections.abc import Mapping, Sequence
from typing import Any, TypeVar
from typing_extensions import TypeAlias

import numpy

_T1 = TypeVar("_T1")
ContainerGeneric: TypeAlias = Mapping[str, ContainerGeneric[_T1]] | Sequence[ContainerGeneric[_T1]] | _T1

AnyArray: TypeAlias = numpy.ndarray[Any, Any]
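For orientation, a rough sketch of how a stub might use these aliases; the function names here are hypothetical and, since _aliases has no runtime equivalent, this is stub-style (type-check-only) code rather than something importable at runtime:

    from collections.abc import Callable

    from tensorflow import Tensor
    from tensorflow._aliases import AnyArray, ContainerGeneric

    # ContainerGeneric[Tensor] matches a bare Tensor, a Sequence of Tensors,
    # a Mapping from str to Tensors, or any nesting of those, so one signature
    # can cover tf.nest-style structures:
    def map_over_structure(fn: Callable[[Tensor], Tensor], structure: ContainerGeneric[Tensor]) -> ContainerGeneric[Tensor]: ...

    # AnyArray is shorthand for an ndarray of any shape and dtype:
    def structure_to_numpy(structure: ContainerGeneric[Tensor]) -> ContainerGeneric[AnyArray]: ...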
@@ -0,0 +1 @@
from tensorflow.keras.initializers import *
@@ -0,0 +1,11 @@
from _typeshed import Incomplete

from tensorflow.keras import (
    activations as activations,
    constraints as constraints,
    initializers as initializers,
    layers as layers,
    regularizers as regularizers,
)

def __getattr__(name: str) -> Incomplete: ...
@@ -0,0 +1,12 @@
from _typeshed import Incomplete
from collections.abc import Callable
from typing import Any
from typing_extensions import TypeAlias

from tensorflow import Tensor

# The implementation uses isinstance so it must be dict and not any Mapping.
_Activation: TypeAlias = str | None | Callable[[Tensor], Tensor] | dict[str, Any]

def get(identifier: _Activation) -> Callable[[Tensor], Tensor]: ...
def __getattr__(name: str) -> Incomplete: ...
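As a quick usage illustration for the activations stub above (a sketch, not part of the PR):

    import tensorflow as tf

    # get() accepts a name, a callable, a config dict, or None, and under this
    # stub always returns a Tensor -> Tensor callable.
    relu = tf.keras.activations.get("relu")
    y = relu(tf.constant([-1.0, 0.0, 2.0]))  # [0., 0., 2.]

    # Passing an existing callable hands it back as a callable.
    tanh = tf.keras.activations.get(tf.keras.activations.tanh)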
@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from collections.abc import Callable
from typing import Any, overload

from tensorflow import Tensor

class Constraint:
    def get_config(self) -> dict[str, Any]: ...
    def __call__(self, __w: Tensor) -> Tensor: ...

@overload
def get(identifier: None) -> None: ...
@overload
def get(identifier: str | dict[str, Any] | Constraint) -> Constraint: ...
@overload
def get(identifier: Callable[[Tensor], Tensor]) -> Callable[[Tensor], Tensor]: ...
def __getattr__(name: str) -> Incomplete: ...
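A short sketch of how a type checker resolves the get() overloads above; the particular constraint and lambda are arbitrary examples, not part of the PR:

    import tensorflow as tf
    from tensorflow.keras import constraints

    # Overload 1: None passes through as None.
    nothing = constraints.get(None)

    # Overload 2: a name, config dict, or Constraint instance yields a Constraint.
    max_norm = constraints.get(tf.keras.constraints.MaxNorm(2.0))

    # Overload 3: a plain Tensor -> Tensor callable is returned unchanged.
    clip = constraints.get(lambda w: tf.clip_by_value(w, 0.0, 1.0))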
@@ -0,0 +1,50 @@
from _typeshed import Incomplete
from collections.abc import Callable
from typing import Any, overload
from typing_extensions import Self, TypeAlias

from tensorflow import Tensor, _DTypeLike, _ShapeLike, _TensorCompatible

class Initializer:
    def __call__(self, shape: _ShapeLike, dtype: _DTypeLike | None = None) -> Tensor: ...
    def get_config(self) -> dict[str, Any]: ...
    @classmethod
    def from_config(cls, config: dict[str, Any]) -> Self: ...

class Constant(Initializer):
    def __init__(self, value: _TensorCompatible = 0) -> None: ...

class GlorotNormal(Initializer):
    def __init__(self, seed: int | None = None) -> None: ...

class GlorotUniform(Initializer):
    def __init__(self, seed: int | None = None) -> None: ...

class TruncatedNormal(Initializer):
    def __init__(self, mean: _TensorCompatible = 0.0, stddev: _TensorCompatible = 0.05, seed: int | None = None) -> None: ...

class RandomNormal(Initializer):
    def __init__(self, mean: _TensorCompatible = 0.0, stddev: _TensorCompatible = 0.05, seed: int | None = None) -> None: ...

class RandomUniform(Initializer):
    def __init__(self, minval: _TensorCompatible = -0.05, maxval: _TensorCompatible = 0.05, seed: int | None = None) -> None: ...

class Zeros(Initializer): ...

constant = Constant
glorot_normal = GlorotNormal
glorot_uniform = GlorotUniform
truncated_normal = TruncatedNormal
zeros = Zeros

_Initializer: TypeAlias = (  # noqa: Y047
    str | Initializer | type[Initializer] | Callable[[_ShapeLike], Tensor] | dict[str, Any] | None
)

@overload
def get(identifier: None) -> None: ...
@overload
def get(identifier: str | Initializer | dict[str, Any] | type[Initializer]) -> Initializer: ...
@overload
def get(identifier: Callable[[_ShapeLike], Tensor]) -> Callable[[_ShapeLike], Tensor]: ...
def __getattr__(name: str) -> Incomplete: ...
Review comment:

This is probably the most cursed import weirdness (pylint is unhappy with it too). A lot of other tf modules do a similar thing, and this part of the allowlist will grow over time.
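For reference, a short usage sketch against the initializers stub above; the shapes and seed are arbitrary, and this is not part of the PR:

    import tensorflow as tf
    from tensorflow.keras import initializers

    # A concrete Initializer: calling it produces a Tensor of the requested shape.
    init = initializers.GlorotUniform(seed=0)
    weights = init(shape=(3, 4), dtype=tf.float32)

    # get() mirrors the overloads above: None -> None, a name or instance -> Initializer,
    # and a plain shape -> Tensor callable is returned unchanged.
    by_name = initializers.get("glorot_uniform")
    passthrough = initializers.get(lambda shape: tf.zeros(shape))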