Commit 46d4353
Merge remote-tracking branch 'upstream/main'
NripeshN committed Feb 8, 2024
2 parents 12f5a5a + 4173945 commit 46d4353
Showing 74 changed files with 1,185 additions and 431 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/test-ivy-cron-gpu.yml
@@ -1,8 +1,8 @@
 name: test-ivy-cron-gpu
 on:
   workflow_dispatch:
-  schedule:
-    - cron: '25 * * * *'
+  # schedule:
+  #   - cron: '25 * * * *'
 permissions:
   actions: read
 concurrency:
8 changes: 4 additions & 4 deletions .vaunt/config.yaml
@@ -29,7 +29,7 @@ achievements:
 - trigger:
     actor: author
     action: pull_request
-    condition: count(merged = true & labels in {'Priority PR'} & created_at >= "2023-12-26") >= 1
+    condition: count(merged = true & labels in {'Priority'} & created_at >= "2023-12-26") >= 1

 - achievement:
     name: Ivy Inspectors
@@ -69,7 +69,7 @@ achievements:
 - trigger:
     actor: author
     action: pull_request
-    condition: count(merged = true & labels in {'Priority PR'} & created_at >= "2023-12-26") >= 5
+    condition: count(merged = true & labels in {'Priority'} & created_at >= "2023-12-26") >= 5

 - achievement:
     name: Ivy Inspectors Bronze
@@ -109,7 +109,7 @@ achievements:
 - trigger:
     actor: author
     action: pull_request
-    condition: count(merged = true & labels in {'Priority PR'} & created_at >= "2023-12-26") >= 15
+    condition: count(merged = true & labels in {'Priority'} & created_at >= "2023-12-26") >= 15

 - achievement:
     name: Ivy Inspectors Silver
@@ -149,7 +149,7 @@ achievements:
 - trigger:
     actor: author
     action: pull_request
-    condition: count(merged = true & labels in {'Priority PR'} & created_at >= "2023-12-26") >= 30
+    condition: count(merged = true & labels in {'Priority'} & created_at >= "2023-12-26") >= 30

 - achievement:
     name: Ivy Inspectors Gold
7 changes: 7 additions & 0 deletions docker/gpu_framework_directory.py
@@ -43,6 +43,13 @@ def install_pkg(path, pkg, base="fw/"):
             " --no-cache-dir",
             shell=True,
         )
+        subprocess.run(
+            f"yes |pip3 install --upgrade torchvision --index-url"
+            f" https://download.pytorch.org/whl/cu121 --target"
+            f" {path} --default-timeout=100 --extra-index-url"
+            " --no-cache-dir",
+            shell=True,
+        )
     elif pkg.split("==")[0] if "==" in pkg else pkg == "jax":
         subprocess.run(
             f"yes |pip install --upgrade --target {path} 'jax[cuda12_pip]' -f"
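
The added block follows the same pattern as the surrounding calls: shell out to pip and install the CUDA 12.1 build of torchvision into the framework target directory. A minimal standalone sketch of that pattern, assuming a hypothetical target path and omitting the dangling --extra-index-url flag from the diff:

import subprocess

target = "fw/torch"  # hypothetical install directory

# Install the CUDA 12.1 torchvision wheel into `target`, mirroring the command
# added above; --index-url, --target, --default-timeout and --no-cache-dir are
# standard pip options.
subprocess.run(
    "yes | pip3 install --upgrade torchvision"
    " --index-url https://download.pytorch.org/whl/cu121"
    f" --target {target} --default-timeout=100 --no-cache-dir",
    shell=True,
)
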
2 changes: 1 addition & 1 deletion docker/requirement_mappings_gpu.json
@@ -2,6 +2,6 @@
   "jax": ["dm-haiku", "flax"],
   "numpy": ["numpy"],
   "mxnet": ["mxnet"],
-  "torch": ["torch-scatter", "torchvision"],
+  "torch": ["torchvision", "torch-scatter"],
   "tensorflow": ["tensorflow-probability"]
 }
2 changes: 1 addition & 1 deletion docs/demos
Submodule demos updated from 358cdc to 20b1a8
15 changes: 9 additions & 6 deletions docs/overview/volunteer_ranks.rst
@@ -1,7 +1,7 @@
 Contributor Leaderboard
 =======================
 
-This page lists all of our amazing Contributors who have contributed to the project! We are grateful for your contributions and we hope to see you grow with the project! The ranks listed here are based on the `level of contribution <contributing/contributor_rewards.rst>`_\.
+This page lists all of our amazing Contributors who have contributed to the project! We are grateful for your contributions and we hope to see you grow with the project! The ranks listed here are based on the `level of contribution <https://unify.ai/docs/ivy/overview/contributing/volunteer_workflow.html>`_\.
 
 Top Contributors
 ----------------
@@ -15,15 +15,18 @@ Top Contributors
* - V\. Sai Suraj
- `Sai-Suraj-27 <https://github.com/Sai-Suraj-27>`_
- Merging Master Gold, Debugging Dynamo Silver
Rising Contributors
-------------------
* - samunder singh
- `samthakur587 <https://github.com/samthakur587>`_
- Merging Master Gold, Debugging Dynamo Silver
Core Contributors
-----------------
.. list-table::
:widths: 50 50 50
:header-rows: 1

* - Name
- Github ID
- Badges
* - samunder singh
- `samthakur587 <https://github.com/samthakur587>`_
- Merging Master Silver, Debugging Dynamo Silver
* - Muhammad ishaque
- `MuhammadNizamani <https://github.com/MuhammadNizamani>`_
- Merging Master Bronze
2 changes: 1 addition & 1 deletion ivy/_version.py
@@ -1 +1 @@
-__version__ = "0.0.6.2"
+__version__ = "0.0.7.0"
2 changes: 1 addition & 1 deletion ivy/data_classes/array/experimental/activations.py
@@ -325,7 +325,7 @@ def hardtanh(
         >>> x = ivy.array([-1., .2, 1.])
         >>> y = x.hardtanh()
         >>> print(y)
-        ivy.array([-1., 1., 1.])
+        ivy.array([-1. , 0.2, 1. ])
         """
         return ivy.hardtanh(self._data, min_val=min_val, max_val=max_val, out=out)
 
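
The corrected output matches what hardtanh actually does: clamp values to [min_val, max_val], leaving in-range values such as 0.2 untouched. A minimal sketch, assuming ivy is installed and the usual default bounds of -1 and 1, using the min_val/max_val keywords forwarded in the wrapped call above:

import ivy

x = ivy.array([-2.0, 0.2, 2.0])
# Values outside [-1, 1] are clamped; values inside pass through unchanged.
print(x.hardtanh())  # expected: ivy.array([-1., 0.2, 1.])
# Custom bounds via the keywords forwarded to ivy.hardtanh above.
print(x.hardtanh(min_val=-0.5, max_val=0.5))  # expected: ivy.array([-0.5, 0.2, 0.5])
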
31 changes: 31 additions & 0 deletions ivy/data_classes/array/experimental/elementwise.py
@@ -1191,3 +1191,34 @@ def erfc(
         ivy.array([1.00000000e+00, 1.84270084e+00, 2.80259693e-45])
         """
         return ivy.erfc(self._data, out=out)
+
+    def erfinv(
+        self: ivy.Array,
+        /,
+        *,
+        out: Optional[ivy.Array] = None,
+    ) -> ivy.Array:
+        """ivy.Array instance method variant of ivy.erfinv. This method simply
+        wraps the function, and so the docstring for ivy.erfinv also applies to
+        this method with minimal changes.
+
+        Parameters
+        ----------
+        self
+            Input array with real or complex valued argument.
+        out
+            Alternate output array in which to place the result.
+            The default is None.
+
+        Returns
+        -------
+        ret
+            Values of the inverse error function.
+
+        Examples
+        --------
+        >>> x = ivy.array([0, -1., 10.])
+        >>> x.erfinv()
+        ivy.array([1.00000000e+00, 1.84270084e+00, 2.80259693e-45])
+        """
+        return ivy.erfinv(self._data, out=out)
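
For reference, erfinv inverts the error function, mapping values in (-1, 1) back to the real line. A minimal usage sketch, assuming ivy is installed; the expected numbers are standard inverse-error-function reference values rather than output captured from this build:

import ivy

x = ivy.array([0.0, 0.5, -0.9])
y = x.erfinv()
# erfinv is the inverse of erf, so ivy.erf(y) should approximately recover x.
# Reference values: erfinv(0) = 0, erfinv(0.5) ≈ 0.4769, erfinv(-0.9) ≈ -1.1631.
print(y)
print(ivy.erf(y))  # expected to be close to [0., 0.5, -0.9]
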
4 changes: 2 additions & 2 deletions ivy/data_classes/array/experimental/linear_algebra.py
@@ -832,7 +832,7 @@ def general_inner_product(
         >>> a = ivy.array([1, 2, 3])
         >>> b = ivy.array([4, 5, 6])
-        >>> result = a.general_inner_product(b, n_modes=1)
+        >>> result = a.general_inner_product(b, 1)
         >>> print(result)
         ivy.array(32)
@@ -844,7 +844,7 @@
         >>> a = ivy.array([[1, 1], [1, 1]])
         >>> b = ivy.array([[1, 2, 3, 4],[1, 1, 1, 1]])
-        >>> result = a.general_inner_product(b, n_modes=1)
+        >>> result = a.general_inner_product(b, 1)
         >>> print(result)
         ivy.array([[2, 3, 4, 5],
                    [2, 3, 4, 5]])
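
Both examples now pass the number of contracted modes positionally. With one contracted mode the operation reduces to an ordinary dot product, which is easy to check by hand; this short sketch assumes ivy is installed:

import ivy

a = ivy.array([1, 2, 3])
b = ivy.array([4, 5, 6])
# One contracted mode: 1*4 + 2*5 + 3*6 = 32, matching the docstring output.
print(a.general_inner_product(b, 1))  # expected: ivy.array(32)
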
6 changes: 3 additions & 3 deletions ivy/data_classes/array/experimental/losses.py
@@ -98,7 +98,7 @@ def log_poisson_loss(
         ivy.array([1.28402555, 1.28402555, 1.03402555, 1.28402555])
         >>> z = ivy.array([0.1, 0.1, 0.7, 0.1])
-        >>> loss = x.x.log_poisson_loss(z, reduction='mean')
+        >>> loss = x.log_poisson_loss(z, reduction='mean')
         >>> print(loss)
         ivy.array(1.1573164)
         """
@@ -353,9 +353,9 @@ def poisson_nll_loss(
         --------
         >>> input_tensor = ivy.array([1, 2, 3, 4], dtype=ivy.float64)
         >>> target_tensor = ivy.array([2, 2, 2, 2], dtype=ivy.float64)
-        >>> loss = poisson_nll_loss(input_tensor, target_tensor, log_input=True)
+        >>> loss = input_tensor.poisson_nll_loss(target_tensor, log_input=True)
         >>> print(loss)
-        ivy.array(16.1978)
+        ivy.array(16.1977562)
         """
         return ivy.poisson_nll_loss(
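
The corrected value 16.1977562 can be reproduced by hand, assuming the usual Poisson NLL definition for log-space inputs (each term is exp(x) - t*x) and a mean reduction over the four elements; this check uses only the standard library:

import math

inp = [1.0, 2.0, 3.0, 4.0]
tgt = [2.0, 2.0, 2.0, 2.0]
# Per-element loss when log_input=True: exp(x) - t * x.
terms = [math.exp(x) - t * x for x, t in zip(inp, tgt)]
print(sum(terms) / len(terms))  # ≈ 16.19776, matching ivy.array(16.1977562)
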
58 changes: 58 additions & 0 deletions ivy/data_classes/array/experimental/manipulation.py
@@ -1134,6 +1134,64 @@ def take(
             self, indices, axis=axis, mode=mode, fill_value=fill_value, out=out
         )
 
+    def unflatten(
+        self: ivy.Array,
+        /,
+        shape: Union[Tuple[int], ivy.Array, ivy.NativeArray],
+        dim: Optional[int] = 0,
+        *,
+        out: Optional[ivy.Array] = None,
+    ) -> ivy.Array:
+        """ivy.Array instance method variant of ivy.unflatten. This method
+        simply wraps the function, and so the docstring for ivy.unflatten also
+        applies to this method with minimal changes.
+
+        Parameters
+        ----------
+        self
+            input array
+        shape
+            array indices. Must have an integer data type.
+        dim
+            axis over which to unflatten. If `axis` is negative,
+            the function must determine the axis along which to select values
+            by counting from the last dimension.
+            By default, the flattened input array is used.
+        out
+            optional output array, for writing the result to. It must
+            have a shape that the inputs broadcast to.
+
+        Returns
+        -------
+        ret
+            an array having the same data type as `x`.
+            The output array must have the same rank
+            (i.e., number of dimensions) as `x` and
+            must have the same shape as `x`,
+            except for the axis specified by `dim`
+            which is replaced with a tuple specified in `shape`.
+
+        Examples
+        --------
+        With 'ivy.Array' input:
+
+        >>> x = ivy.array([[1.2, 2.3, 3.4, 4.5],
+        ...               [5.6, 6.7, 7.8, 8.9]])
+        >>> dim = 1
+        >>> shape = (2, 2)
+        >>> y = ivy.zeros([2, 2, 2])
+        >>> x.unflatten(shape=shape, dim=dim, out=y)
+        >>> print(y)
+        ivy.array([[[1.2, 2.3], [3.4, 4.5]], [[5.6, 6.7], [7.8, 8.9]]])
+        """
+        return ivy.unflatten(
+            self._data,
+            shape=shape,
+            dim=dim,
+            out=out,
+        )
+
     def trim_zeros(
         self: ivy.Array,
         /,
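
A shorter usage sketch of the new method, assuming ivy is installed: unflatten expands a single axis into the given shape, so splitting axis 1 of a (3, 4) array into (2, 2) yields shape (3, 2, 2).

import ivy

x = ivy.reshape(ivy.arange(12), (3, 4))
# Split axis 1 (length 4) into a (2, 2) block; the other axes are untouched.
y = x.unflatten(shape=(2, 2), dim=1)
print(y.shape)  # expected: (3, 2, 2)
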
6 changes: 3 additions & 3 deletions ivy/data_classes/array/losses.py
@@ -50,7 +50,7 @@ def cross_entropy(
         >>> y = ivy.array([0.25, 0.25, 0.25, 0.25])
         >>> z = x.cross_entropy(y)
         >>> print(z)
-        ivy.array(1.3862944)
+        ivy.array(0.34657359)
         """
         return ivy.cross_entropy(
             self._data, pred, axis=axis, epsilon=epsilon, reduction=reduction, out=out
@@ -110,7 +110,7 @@ def binary_cross_entropy(
         >>> y = ivy.array([0.7, 0.8, 0.2])
         >>> z = x.binary_cross_entropy(y)
         >>> print(z)
-        ivy.array([0.357, 0.223, 0.223])
+        ivy.array(0.26765382)
         """
         return ivy.binary_cross_entropy(
             self._data,
@@ -170,7 +170,7 @@ def sparse_cross_entropy(
         >>> y = ivy.array([0.7, 0.8, 0.2])
         >>> z = x.sparse_cross_entropy(y)
         >>> print(z)
-        ivy.array([0.223, 0.223, 0.357])
+        ivy.array([0.07438118, 0.07438118, 0.11889165])
         """
         return ivy.sparse_cross_entropy(
             self._data, pred, axis=axis, epsilon=epsilon, reduction=reduction, out=out
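
The updated outputs appear to correspond to an averaged loss (for instance, 0.26765382 is the mean of the previous element-wise values [0.357, 0.223, 0.223]). A hand check of the cross_entropy value, assuming the usual -sum(true * log(pred)) definition averaged over the four elements and a one-hot target (the target x is defined outside the lines shown in this hunk, so it is assumed here):

import math

target = [0.0, 0.0, 0.0, 1.0]   # assumed one-hot target
pred = [0.25, 0.25, 0.25, 0.25]
# -sum(t * log(p)) = -log(0.25) ≈ 1.3862944; dividing by 4 gives the new value.
ce = -sum(t * math.log(p) for t, p in zip(target, pred)) / len(pred)
print(ce)  # ≈ 0.34657359, matching the updated docstring output
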
13 changes: 6 additions & 7 deletions ivy/data_classes/container/experimental/activations.py
@@ -968,10 +968,10 @@ def _static_hardtanh(
         Examples
         --------
         >>> x = x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
-        >>> y = ivy.Container.static_hardtanh(x)
+        >>> y = ivy.Container._static_hardtanh(x)
         >>> print(y)
         {
-            a: ivy.array([0.39, -1.]),
+            a: ivy.array([0.3899, -1.]),
             b: ivy.array([1., -0.2])
         }
         """
@@ -1033,11 +1033,11 @@ def hardtanh(
         Examples
         --------
-        >>> x = x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
-        >>> y = ivy.Container.static_hardtanh(x)
+        >>> x = ivy.Container(a=ivy.array([0.39, -2.0]), b=ivy.array([2., -0.2]))
+        >>> y = ivy.Container.hardtanh(x)
         >>> print(y)
         {
-            a: ivy.array([0.39, -1.]),
+            a: ivy.array([0.389999, -1.]),
             b: ivy.array([1., -0.2])
         }
         """
@@ -1820,8 +1820,7 @@ def hardshrink(
         Examples
         --------
-        >>> import ivy.numpy as np
-        >>> x = ivy.Container(a=np.array([1., -2.]), b=np.array([0.4, -0.2]))
+        >>> x = ivy.Container(a=ivy.array([1., -2.]), b=ivy.array([0.4, -0.2]))
         >>> y = ivy.Container.hardshrink(x)
         >>> print(y)
         {
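
The corrected hardshrink example (its printed output is truncated in this hunk) now builds the containers from ivy.array directly. As a sketch of the expected behaviour, assuming the common default shrinkage threshold of 0.5: entries with magnitude at or below the threshold are zeroed, larger entries pass through unchanged.

import ivy

x = ivy.Container(a=ivy.array([1., -2.]), b=ivy.array([0.4, -0.2]))
y = ivy.Container.hardshrink(x)
# |1.| and |-2.| exceed 0.5 and are kept; |0.4| and |-0.2| do not and become 0.
print(y)  # expected: { a: ivy.array([1., -2.]), b: ivy.array([0., 0.]) }
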