
Commit

Merge branch 'unifyai:main' into master
Monsurat-Onabajo authored Sep 14, 2023
2 parents 01c97ce + 19540d4 commit 97a98ef
Showing 25 changed files with 327 additions and 137 deletions.
74 changes: 54 additions & 20 deletions .github/workflows/auto-comment.yml
@@ -1,28 +1,62 @@
name: Auto Comment
name: Check Semantics and welcome new contributors

on:
pull_request_target:
types: [opened]
types:
- opened
- edited
- synchronize
- reopened
workflow_call:

permissions:
pull-requests: write

jobs:
auto_comment:
semantics:
name: Semantics
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: amannn/[email protected]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

pr-compliance-checks:
name: PR Compliance Checks
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Comment
uses: actions/github-script@v6
- uses: mtfoley/[email protected]
with:
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: `Thanks for contributing to Ivy! 😊👏
Here are some of the important points from our Contributing Guidelines 📝:
1. Feel free to ignore the \`run_tests (1)\`, \`run_tests (2)\`, … jobs, and only look at the \`display_test_results\` job. 👀 It contains the following two sections:
- **Combined Test Results:** This shows the results of all the ivy tests that ran on the PR. ✔️
- **New Failures Introduced:** This lists the tests that are passing on main, but fail on the PR Fork. Please try to make sure that there are no such tests. 💪
2. The \`lint / Check formatting / check-formatting\` tests check for the formatting of your code. 📜 If it fails, please check the exact error message in the logs and fix the same. ⚠️🔧
3. Finally, the \`test-docstrings / run-docstring-tests\` check for the changes made in docstrings of the functions. This may be skipped, as well. 📚
Happy coding! 🎉👨‍💻`
})
body-auto-close: false
protected-branch-auto-close: false
body-comment: >
## Issue Reference
In order to be considered for merging, the pull request description must refer to a
specific issue number. This is described in our
[contributing guide](https://unify.ai/docs/ivy/overview/contributing/the_basics.html#todo-list-issues) and our PR template.
This check is looking for a phrase similar to: "Fixes #XYZ" or "Resolves #XYZ" where XYZ is the issue
number that this PR is meant to address.
welcome:
name: Welcome
runs-on: ubuntu-latest
timeout-minutes: 10
needs: semantics
if: github.event.action == 'opened'
steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
pr-message: |-
Congrats on making your first Pull Request and thanks for supporting Ivy! 🎉
Join the conversation in our [Discord](https://discord.com/invite/sXyFF8tDtm).
Here are some notes to understand our tests:
- We have merged all the test results into a single `display_test_results` job. 👀 It contains the following two sections:
- **Combined Test Results:** This shows the results of all the ivy tests that ran on the PR. ✔️
- **New Failures Introduced:** This lists the tests that are passing on main, but fail on the PR Fork.
Please try to make sure that there are no such tests. 💪
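The `pr-compliance-checks` job above rejects pull requests whose description does not reference an issue with a phrase like "Fixes #XYZ". A minimal sketch of that kind of pattern match, purely illustrative — the regex, keywords, and function name below are assumptions, not the `mtfoley/pr-compliance-action`'s actual implementation:

```python
import re

# Illustrative only: keywords and pattern are assumptions, not the action's real logic.
ISSUE_REF = re.compile(r"\b(fixes|closes|resolves)\s+#\d+\b", re.IGNORECASE)

def references_issue(pr_body: str) -> bool:
    """Return True if the PR description links an issue, e.g. 'Fixes #1234'."""
    return bool(ISSUE_REF.search(pr_body or ""))

print(references_issue("This PR resolves #27446 by adding complex_mode."))  # True
print(references_issue("Misc cleanup."))                                    # False
```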
26 changes: 26 additions & 0 deletions .github/workflows/dockerfile-multiversion-push.yml
@@ -0,0 +1,26 @@
name: Dockerfile Multiversion Push

on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:

jobs:

build:
runs-on: ubuntu-latest

steps:
- name: Checkout Ivy 🛎
uses: actions/checkout@v3

- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push Dockerfile
run: |
docker build --progress=plain --no-cache -t unifyai/multiversion:latest -f docker/DockerfileMultiversion .
docker push unifyai/multiversion:latest
2 changes: 1 addition & 1 deletion docker/DockerfileMultiversion
@@ -2,7 +2,7 @@ FROM debian:buster
WORKDIR /ivy

ARG fw
ARG pycon=3.8.10
ARG pycon=3.10
# Install miniconda
ENV CONDA_DIR /opt/miniconda/

41 changes: 32 additions & 9 deletions docker/requirement_mappings_multiversion.json
@@ -1,11 +1,34 @@

{
"tensorflow": [
{"tensorflow-probability":{"2.12.0":"0.20.0","2.11.0":"0.19.0"}}
],
"jax": ["dm-haiku", "flax",{"jaxlib": {"0.4.10": "0.4.10","0.4.8": "0.4.7"}}],
"numpy": ["numpy"],
"paddle": ["paddlepaddle"],
"mxnet": ["mxnet"],
"torch": ["torch-scatter"]
"tensorflow": [
{
"tensorflow-probability": {
"2.13.0": "0.21.0",
"2.12.0": "0.20.0",
"2.11.0": "0.19.0"
}
}
],
"jax": [
"dm-haiku",
"flax",
{
"jaxlib": {
"0.4.14": "0.4.14",
"0.4.10": "0.4.10",
"0.4.8": "0.4.7"
}
}
],
"numpy": [
"numpy"
],
"paddle": [
"paddlepaddle"
],
"mxnet": [
"mxnet"
],
"torch": [
"torch-scatter"
]
}
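The mapping above pins companion-package versions per framework release (for example, tensorflow 2.13.0 pairs with tensorflow-probability 0.21.0). A small sketch of how such a file might be consumed; the helper below is hypothetical and not part of this commit, and it assumes the file is read from `docker/requirement_mappings_multiversion.json`:

```python
import json

# Hypothetical helper: resolve the pinned companion version for a framework release.
def pinned_version(mapping: dict, framework: str, companion: str, release: str):
    for entry in mapping.get(framework, []):
        if isinstance(entry, dict) and companion in entry:
            return entry[companion].get(release)
    return None

with open("docker/requirement_mappings_multiversion.json") as f:
    mapping = json.load(f)

print(pinned_version(mapping, "tensorflow", "tensorflow-probability", "2.13.0"))  # 0.21.0
print(pinned_version(mapping, "jax", "jaxlib", "0.4.8"))                          # 0.4.7
```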
2 changes: 1 addition & 1 deletion docs/demos
Submodule demos updated from 23d8da to 8163e5
13 changes: 11 additions & 2 deletions ivy/data_classes/array/activations.py
@@ -131,7 +131,13 @@ def gelu(
self._data, approximate=approximate, complex_mode=complex_mode, out=out
)

def sigmoid(self: ivy.Array, /, *, out: Optional[ivy.Array] = None) -> ivy.Array:
def sigmoid(
self: ivy.Array,
/,
*,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
out: Optional[ivy.Array] = None,
) -> ivy.Array:
"""
ivy.Array instance method variant of ivy.sigmoid.
@@ -142,6 +148,9 @@ def sigmoid(self: ivy.Array, /, *, out: Optional[ivy.Array] = None) -> ivy.Array
----------
self
Input array
complex_mode
optional specifier for how to handle complex data types. See
``ivy.func_wrapper.handle_complex_input`` for more detail.
out
optional output array for writing the result to. It must have the same shape
that the input broadcasts to. Default: ``None``.
@@ -159,7 +168,7 @@ def sigmoid(self: ivy.Array, /, *, out: Optional[ivy.Array] = None) -> ivy.Array
>>> print(y)
ivy.array([0.269, 0.731, 0.881])
"""
return ivy.sigmoid(self._data, out=out)
return ivy.sigmoid(self._data, complex_mode=complex_mode, out=out)

def softmax(
self: ivy.Array,
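With the change above, `Array.sigmoid` now forwards a `complex_mode` keyword to `ivy.sigmoid`. A brief usage sketch — values are illustrative, and the complex-input call assumes the active backend supports complex dtypes:

```python
import ivy

x = ivy.array([-1.0, 0.0, 1.0])
print(x.sigmoid())                       # defaults to complex_mode="jax"

z = ivy.array([1 + 1j, -0.5 + 2j])       # assumes complex dtype support in the backend
print(z.sigmoid(complex_mode="split"))   # forwarded to handle_complex_input
```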
10 changes: 10 additions & 0 deletions ivy/data_classes/container/activations.py
@@ -419,6 +419,7 @@ def _static_sigmoid(
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
out: Optional[ivy.Container] = None,
) -> ivy.Container:
"""
@@ -441,6 +442,9 @@ def _static_sigmoid(
map_sequences
Whether to also map method to sequences (lists, tuples).
Default is ``False``.
complex_mode
optional specifier for how to handle complex data types. See
``ivy.func_wrapper.handle_complex_input`` for more detail.
out
optional output container, for writing the result to. It must have a shape
that the inputs broadcast to.
@@ -467,6 +471,7 @@ def _static_sigmoid(
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
complex_mode=complex_mode,
out=out,
)

@@ -478,6 +483,7 @@ def sigmoid(
to_apply: Union[bool, ivy.Container] = True,
prune_unapplied: Union[bool, ivy.Container] = False,
map_sequences: Union[bool, ivy.Container] = False,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
out: Optional[ivy.Container] = None,
) -> ivy.Container:
"""
@@ -500,6 +506,9 @@ def sigmoid(
map_sequences
Whether to also map method to sequences (lists, tuples).
Default is ``False``.
complex_mode
optional specifier for how to handle complex data types. See
``ivy.func_wrapper.handle_complex_input`` for more detail.
out
optional output container, for writing the result to. It must have a shape
that the inputs broadcast to.
@@ -525,6 +534,7 @@ def sigmoid(
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
complex_mode=complex_mode,
out=out,
)

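The same `complex_mode` keyword is threaded through the Container variants shown above. A short illustrative sketch (values are assumptions):

```python
import ivy

c = ivy.Container(a=ivy.array([-1.0, 1.0]), b=ivy.array([0.0, 2.0]))
print(c.sigmoid(complex_mode="jax"))  # keyword forwarded leaf-wise to ivy.sigmoid
```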
Binary file modified ivy/engines/XLA/rust_api/python_frontend/stateful_layers.cpython-310-x86_64-linux-gnu.so
100644 → 100755
Binary file not shown.
Binary file modified ivy/engines/XLA/rust_api/python_frontend/xla_core.cpython-310-x86_64-linux-gnu.so
100644 → 100755
Binary file not shown.
Binary file modified ivy/engines/ivy2xla.cpython-310-x86_64-linux-gnu.so
Binary file not shown.
4 changes: 3 additions & 1 deletion ivy/functional/backends/jax/activations.py
@@ -40,7 +40,9 @@ def relu(
return jnp.maximum(x, 0)


def sigmoid(x: JaxArray, /, *, out: Optional[JaxArray] = None) -> JaxArray:
def sigmoid(
x: JaxArray, /, *, complex_mode="jax", out: Optional[JaxArray] = None
) -> JaxArray:
return 1 / (1 + jnp.exp(-x))


4 changes: 3 additions & 1 deletion ivy/functional/backends/numpy/activations.py
@@ -46,7 +46,9 @@ def gelu(
return ivy.astype(ret, x.dtype, copy=False)


def sigmoid(x: np.ndarray, /, *, out: Optional[np.ndarray] = None) -> np.ndarray:
def sigmoid(
x: np.ndarray, /, *, complex_mode="jax", out: Optional[np.ndarray] = None
) -> np.ndarray:
if not ivy.is_array(x):
return np.asarray(1 / (1 + np.exp(-x)))
return np.asarray(1 / (1 + np.exp(-x))).astype(x.dtype)
9 changes: 6 additions & 3 deletions ivy/functional/backends/paddle/activations.py
@@ -86,12 +86,15 @@ def gelu(
return F.gelu(x, approximate=approximate)


@with_unsupported_device_and_dtypes(
{"2.5.1 and below": {"cpu": ("bfloat16",)}}, backend_version
)
def sigmoid(
x: paddle.Tensor, /, *, out: Optional[paddle.Tensor] = None
x: paddle.Tensor, /, *, complex_mode="jax", out: Optional[paddle.Tensor] = None
) -> paddle.Tensor:
if paddle.is_complex(x):
return 1.0 / (1.0 + paddle_backend.exp(-x))
if x.dtype in unsupported_dtypes:
if paddle.is_complex(x):
return 1 / (1 + paddle_backend.exp(-x))
return F.sigmoid(x.cast("float32")).cast(x.dtype)
return F.sigmoid(x)

4 changes: 3 additions & 1 deletion ivy/functional/backends/tensorflow/activations.py
@@ -46,7 +46,9 @@ def relu(x: Tensor, /, *, complex_mode="jax", out: Optional[Tensor] = None) -> T
return tf.nn.relu(x)


def sigmoid(x: Tensor, /, *, out: Optional[Tensor] = None) -> Tensor:
def sigmoid(
x: Tensor, /, *, complex_mode="jax", out: Optional[Tensor] = None
) -> Tensor:
if not ivy.is_array(x):
x = float(x)
return tf.nn.sigmoid(x)
4 changes: 3 additions & 1 deletion ivy/functional/backends/torch/activations.py
@@ -54,7 +54,9 @@ def gelu(


@with_unsupported_dtypes({"2.0.1 and below": ("float16",)}, backend_version)
def sigmoid(x: torch.Tensor, /, *, out: Optional[torch.Tensor] = None) -> torch.Tensor:
def sigmoid(
x: torch.Tensor, /, *, complex_mode="jax", out: Optional[torch.Tensor] = None
) -> torch.Tensor:
if not ivy.is_array(x):
x = torch.tensor(x)
return torch.sigmoid(x, out=out)
2 changes: 1 addition & 1 deletion ivy/functional/frontends/jax/nn/non_linear_activations.py
@@ -286,7 +286,7 @@ def selu(x):
@to_ivy_arrays_and_back
def sigmoid(x):
x = _type_conversion(x)
ret = ivy.sigmoid(x)
ret = ivy.sigmoid(x, complex_mode="jax")
return ivy.astype(ret, x.dtype)


6 changes: 6 additions & 0 deletions ivy/functional/frontends/paddle/tensor/math.py
@@ -48,3 +48,9 @@ def rsqrt_(x, name=None):
@to_ivy_arrays_and_back
def sqrt_(x, name=None):
return ivy.inplace_update(x, sqrt(x))


@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def subtract_(x, y, name=None):
return ivy.inplace_update(x, subtract(x, y))
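A quick illustrative call to the in-place frontend function added above. The backend choice and values are assumptions, and exact in-place visibility on the passed arrays can depend on the frontend's array conversion, so treat this as a sketch:

```python
import ivy
from ivy.functional.frontends.paddle.tensor.math import subtract_

ivy.set_backend("numpy")        # any installed backend; numpy assumed here
x = ivy.array([3.0, 5.0])
y = ivy.array([1.0, 2.0])
res = subtract_(x, y)
print(res)                      # [2., 3.]; x is also updated via ivy.inplace_update
```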
17 changes: 17 additions & 0 deletions ivy/functional/frontends/tensorflow/signal.py
@@ -19,6 +19,15 @@ def idct(input, type=2, n=None, axis=-1, norm=None, name=None):
return ivy.dct(input, type=inverse_type, n=n, axis=axis, norm=norm)


# kaiser_bessel_derived_window
@handle_tf_dtype
@to_ivy_arrays_and_back
def kaiser_bessel_derived_window(
window_length, beta=12.0, dtype=ivy.float32, name=None
):
return ivy.kaiser_bessel_derived_window(window_length, beta=beta, dtype=dtype)


@with_supported_dtypes(
{"2.13.0 and below": ("float32", "float64", "float16", "bfloat16")},
"tensorflow",
@@ -36,3 +45,11 @@ def kaiser_window(window_length, beta=12.0, dtype=ivy.float32, name=None):
@to_ivy_arrays_and_back
def vorbis_window(window_length, dtype=ivy.float32, name=None):
return ivy.vorbis_window(window_length, dtype=dtype, out=None)


kaiser_bessel_derived_window.supported_dtypes = (
"float32",
"float64",
"float16",
"bfloat16",
)
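An illustrative call to the new `kaiser_bessel_derived_window` frontend defined above; the backend selection and window length are assumptions:

```python
import ivy
from ivy.functional.frontends.tensorflow.signal import kaiser_bessel_derived_window

ivy.set_backend("tensorflow")   # assumed; any backend implementing the ivy API
w = kaiser_bessel_derived_window(16, beta=12.0)  # length-16 KBD window
print(w)
```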
10 changes: 9 additions & 1 deletion ivy/functional/ivy/activations.py
@@ -378,8 +378,13 @@ def relu(
@to_native_arrays_and_back
@handle_array_function
@handle_device_shifting
@handle_complex_input
def sigmoid(
x: Union[ivy.Array, ivy.NativeArray], /, *, out: Optional[ivy.Array] = None
x: Union[ivy.Array, ivy.NativeArray],
/,
*,
complex_mode: Literal["split", "magnitude", "jax"] = "jax",
out: Optional[ivy.Array] = None,
) -> ivy.Array:
"""
Apply the sigmoid function element-wise.
@@ -388,6 +393,9 @@ def sigmoid(
----------
x
input array.
complex_mode
optional specifier for how to handle complex data types. See
``ivy.func_wrapper.handle_complex_input`` for more detail.
out
optional output array, for writing the result to. It must have a shape that the
input broadcasts to.
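The functional `ivy.sigmoid` now routes complex inputs through the `handle_complex_input` wrapper via `complex_mode`. A hedged usage sketch — the comments on what each mode does are my informal reading of `ivy.func_wrapper.handle_complex_input`, not taken from this diff, and the complex call assumes a backend with complex dtype support:

```python
import ivy

x = ivy.array([0.0, 1.0, 2.0])
print(ivy.sigmoid(x))                        # real input; complex_mode has no effect

z = ivy.array([1 + 2j, -0.5 + 0.5j])
# "jax"       -> apply the function to the complex values directly (default)
# "split"     -> apply it to the real and imaginary parts separately
# "magnitude" -> apply it to the magnitude while preserving the phase
print(ivy.sigmoid(z, complex_mode="split"))
```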