Changes from all commits (41 commits)
edf6a94
Update mnist.py
co63oc May 22, 2023
7abc8d9
Merge pull request #518 from 514flowey/complex
514flowey May 16, 2024
e90fbac
Merge pull request #534 from fansunqi/master
LDYang694 May 16, 2024
04a02e0
polish PixelShuffle in nn.py
LDYang694 May 16, 2024
233c0e7
Merge pull request #538 from fansunqi/issue525_branch
LDYang694 May 16, 2024
a8f51c4
Merge pull request #537 from fansunqi/issue527_branch
LDYang694 May 16, 2024
a2316de
Merge pull request #535 from fansunqi/issue529_branch
LDYang694 May 16, 2024
7bb9ce2
Merge pull request #536 from fansunqi/issue528_branch
LDYang694 May 16, 2024
b506d63
Merge pull request #443 from co63oc/patch-1
LDYang694 May 16, 2024
e001b4c
polish rocm support
LDYang694 May 20, 2024
26b2cf0
Merge pull request #543 from LDYang694/master
LDYang694 May 20, 2024
f645a07
Merge pull request #541 from fansunqi/issue521_branch
LDYang694 May 20, 2024
f2644d4
Merge pull request #540 from fansunqi/issue522_branch
LDYang694 May 20, 2024
640af86
Merge pull request #539 from fansunqi/issue523_branch
LDYang694 May 20, 2024
05f4cf3
Update version to 1.3.9.8
LDYang694 May 20, 2024
e69e1f7
Merge branch 'master' into master
uyzhang May 21, 2024
419bf3c
Merge pull request #533 from uyzhang/master
uyzhang May 21, 2024
7714ce3
fix: a minimal quick fix for issue #544
zhc7 May 22, 2024
c334324
Merge pull request #545 from zhc7/patch-1
LDYang694 May 28, 2024
5df1673
fix: jt.Var.expand with valid index -1
May 29, 2024
14de5fa
an IndexError fix for issue #448
May 30, 2024
862bce9
a ValueError fix for issue #450
May 30, 2024
cd8b19a
fix illegal parameters of Pool and Pool3d, issues #451, #453, #456, #457
May 30, 2024
793d638
fix illegal parameters of Conv2d, issues #471, #472, #473, #474, #475, #476, …
May 30, 2024
9e60eb6
fix illegal parameters of PixelShuffle, issue #458; fix validity of …
May 30, 2024
9a23f5c
check x.shape and kernel_size of Pool and Pool3d, issues #461, #463
May 31, 2024
9d7e634
fix Pad2d with illegal padding, issues #464, #465, #466, #467
May 31, 2024
c79142d
fix illegal parameters of ConvTranspose and Pool, issues #478, #480, #481 …
May 31, 2024
6967475
Update README.md
LDYang694 Jun 4, 2024
b1f18f0
Merge pull request #546 from Hanyx2021/fix-expand
LDYang694 Jun 5, 2024
8d26bb8
polish nn.Sequential attribute
LDYang694 Jun 5, 2024
0dc433d
check input shape and scale factor's positiveness in jt.nn.Upsample
Jun 10, 2024
ca63d37
resume
Jun 10, 2024
0ea0fd9
Update setup.py
LDYang694 Jun 25, 2024
7416cfb
update version
LDYang694 Jun 25, 2024
21e7409
check parameters' positive in jt.nn.fold
fansunqi Jul 1, 2024
8454a7a
Merge branch 'Jittor:master' into fold
fansunqi Jul 1, 2024
5ff687d
Merge pull request #561 from fansunqi/fold
LDYang694 Jul 2, 2024
b2f7f26
update version
LDYang694 Jul 2, 2024
7852283
add isin
LDYang694 Jul 5, 2024
7140dd1
Fix PReLU Broadcasting Bug for Multiple Parameters
hishambarakat16 Jul 5, 2024
README.md: 4 changes (2 additions, 2 deletions)
@@ -382,10 +382,10 @@ Email: [email protected]
 
 File an issue: https://github.com/Jittor/jittor/issues
 
-QQ Group: 761222083
+QQ Group: 836860279
 
 
-<img src="https://cg.cs.tsinghua.edu.cn/jittor/images/news/2020-12-8-21-19-1_2_2/fig4.png" width="200"/>
+<img src="https://github.com/Jittor/jittor/assets/62846124/8dd830bd-b31c-4e4f-9a78-5fd7a3409145" width="200"/>
 
 ## The Team
 
python/jittor/__init__.py: 27 changes (25 additions, 2 deletions)
@@ -9,7 +9,7 @@
 # file 'LICENSE.txt', which is part of this source code package.
 # ***************************************************************
 
-__version__ = '1.3.9.6'
+__version__ = '1.3.9.10'
 from jittor_utils import lock
 with lock.lock_scope():
     ori_int = int
@@ -428,7 +428,9 @@ def random(shape, dtype="float32", type="uniform"):
     jt.Var([[0.96788853 0.28334728 0.30482838]
      [0.46107793 0.62798643 0.03457401]], dtype=float32)
     '''
-
+    for dim in shape:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {shape}")
     ret = ops.random(shape, "float32", type)
     ## TODO: move those code to core
     #if dtype in ["float16", "bfloat16"]:
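The same negative-dimension guard is repeated in the other creation entry points below (ones, zeros, full, randn, randint), so every shape is validated before it reaches the ops layer. A minimal sketch of the resulting behavior, assuming a Jittor build that includes this patch:

```python
import jittor as jt

# A negative extent is now rejected up front with a clear error,
# instead of propagating an invalid shape into the op pipeline.
try:
    jt.random((2, -3))
except RuntimeError as e:
    print(e)  # names the offending dimension (-3) and the full shape
```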
@@ -484,6 +486,9 @@ def ones(*shape, dtype="float32"):
         shape = shape[:-1]
     if isinstance(shape, tuple) and isinstance(shape[0], (Sequence, NanoVector)):
         shape = shape[0]
+    for dim in shape:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {shape}")
     return unary(1, dtype).broadcast(shape)
 
 def new_ones(x, size):
@@ -515,6 +520,9 @@ def zeros(*shape, dtype="float32"):
         shape = shape[:-1]
     if isinstance(shape, tuple) and isinstance(shape[0], (Sequence, NanoVector)):
         shape = shape[0]
+    for dim in shape:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {shape}")
     return unary(0, dtype).broadcast(shape)
 
 def new_zeros(x, size):
@@ -547,6 +555,9 @@ def full(shape,val,dtype="float32"):
     '''
     if not isinstance(shape, (NanoVector, Sequence)):
         shape = (shape,)
+    for dim in shape:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {shape}")
     return unary(val, dtype).broadcast(shape)
 
 def new_full(x, size, val):
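In ones, zeros, and full the guard runs after the shape has been normalized, so the varargs and scalar spellings are covered too. A quick sketch under the same assumption of a patched build:

```python
import jittor as jt

x = jt.full((2, 3), 7)   # 2x3 tensor filled with 7
y = jt.full(4, 7)        # a scalar shape is first normalized to (4,)
try:
    jt.ones(2, -1)       # varargs shape; the -1 trips the new check
except RuntimeError as e:
    print(e)
```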
@@ -687,6 +698,8 @@ def flatten(input, start_dim=0, end_dim=-1):
     start_dim = len(in_shape) + start_dim if start_dim < 0 else start_dim
     end_dim = len(in_shape) + end_dim if end_dim < 0 else end_dim
     assert end_dim >= start_dim, "end_dim should be larger than or equal to start_dim for flatten function"
+    if len(in_shape) <= end_dim:
+        raise IndexError(f"Dimension out of range (expected to be in range of [{-len(in_shape)}, {len(in_shape) - 1}], but got {end_dim})")
     out_shape = []
     for i in range(0,start_dim,1): out_shape.append(in_shape[i])
     dims = 1
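The added bound check turns a silently wrong flatten into an explicit IndexError when end_dim, after the negative-index normalization above, points past the last axis. A short sketch, assuming a patched build:

```python
import jittor as jt

x = jt.ones((2, 3, 4))
y = jt.flatten(x, 1, 2)   # the last two axes merge into one of size 12
try:
    jt.flatten(x, 1, 3)   # end_dim 3 is out of range for a 3-D input
except IndexError as e:
    print(e)  # ...expected to be in range of [-3, 2], but got 3
```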
@@ -917,6 +930,9 @@ def randn(*size, dtype="float32", requires_grad=True) -> Var:
     [-0.612632 -1.1471151 -1.1879086 ]], dtype=float32)
     '''
     if isinstance(size, tuple) and isinstance(size[0], (tuple, list, NanoVector)): size = size[0]
+    for dim in size:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {size}")
     arr = jt.random(size, dtype, "normal")
     if not requires_grad: return arr.stop_grad()
     return arr
@@ -1013,6 +1029,9 @@ def randint(low, high=None, shape=(1,), dtype="int32") -> Var:
     [1 1 1]], dtype=int32)
     '''
     if high is None: low, high = 0, low
+    for dim in shape:
+        if dim < 0:
+            raise RuntimeError(f"Trying to create tensor with negative dimension {dim}: {shape}")
     v = (jt.random(shape) * (high - low) + low).clamp(low, high-0.5)
     v = jt.floor_int(v)
     return v.astype(dtype)
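Beyond the new shape guard, the body shows why randint stays in range: a uniform draw in [0, 1] is scaled to [low, high], clamped to high - 0.5, and floored, so even a draw of exactly 1.0 lands on high - 1. A small check, again assuming a patched build:

```python
import jittor as jt

v = jt.randint(0, 5, shape=(1000,))
print(v.min().item(), v.max().item())  # always within [0, 4]
try:
    jt.randint(0, 5, shape=(-1,))      # rejected by the new guard
except RuntimeError as e:
    print(e)
```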
@@ -2152,3 +2171,7 @@ def inplace_wrapper(new_k, prev_func):
 from . import math_util
 from .math_util import *
 from . import distributions
+
+if jt.compiler.has_acl:
+    from jittor.extern.acl.acl_compiler import change_function
+    change_function()
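The ACL hook is imported only when the compiler reports an ACL (Huawei Ascend) build, so other configurations never touch the extern module. A sketch of the same guarded-patching pattern used defensively from user code; has_acl, the module path, and change_function are taken from the diff, while getattr is an added safeguard for older builds that lack the flag:

```python
import jittor as jt

if getattr(jt.compiler, "has_acl", False):
    # Per this PR, change_function() rewires selected Jittor
    # functions for the ACL backend; skipped entirely elsewhere.
    from jittor.extern.acl.acl_compiler import change_function
    change_function()
```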