1
0

refactor: merge multiple projects into one repository and create a new project

This commit is contained in:
2026-04-07 08:30:41 +08:00
parent 7aa7ae3335
commit 6cb1a89751
49 changed files with 2932 additions and 4 deletions

10
d2l/.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
# Python-generated files
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
# Virtual environments
.venv

1
d2l/.python-version Normal file
View File

@@ -0,0 +1 @@
3.11

3
d2l/README.md Normal file
View File

@@ -0,0 +1,3 @@
# D2L Learning
Code written while working through the [Dive Into Deep Learning](https://zh.d2l.ai/) course.

6
d2l/main.py Normal file
View File

@@ -0,0 +1,6 @@
def main():
print("Hello from learning!")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,132 @@
import torch
def main():
    """Run the currently selected chapter demo.

    Earlier chapters are kept as commented-out toggles so a single one can
    be re-enabled by hand while working through the course.
    """
    # print('===== Chapter 1 =====')
    # c1_introduction()
    # print('===== Chapter 2 =====')
    # c2_operator()
    # print('===== Chapter 3 =====')
    # c3_broadcast()
    # print('===== Chapter 4 =====')
    # c4_index_and_slice()
    # print('===== Chapter 5 =====')
    # c5_save_memory()
    print('===== Chapter 6 =====')
    c6_into_python_object()
def c1_introduction():
    """Chapter 1: tensor creation, shape inspection, and reshaping basics."""
    x = torch.arange(12)
    print(f'x: {x}')
    print(f'x.shape: {x.shape}')
    print(f'x.numel(): {x.numel()}')
    xs = x.reshape(3, 4)
    print(f'x.reshape: {xs}')
    # -1 lets torch infer the remaining dimension from the element count
    xs = x.reshape(-1, 4)
    print(f'x.reshape auto 1: {xs}')
    xs = x.reshape(3, -1)
    print(f'x.reshape auto 2: {xs}')
    zeros = torch.zeros((2, 3, 4))
    print(f'zeros: {zeros}')
    ones = torch.ones((2, 3, 4))
    print(f'ones: {ones}')
    # randn: samples, so this output differs on every run
    randoms = torch.randn(3, 4)
    print(f'randn: {randoms}')
    manual = torch.tensor([[2, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]], )
    print(f'manual: {manual}')
    # It looks like the first reshape argument is the row count
    manual = torch.tensor([2, 1, 4, 3, 1, 2, 3, 4, 4, 3, 2, 1]).reshape(3, -1)
    print(f'manual: {manual}')
def c2_operator():
    """Chapter 2: elementwise arithmetic, concatenation, comparison, and sum."""
    # torch picks the dtype automatically from the element types
    x = torch.tensor([1.0, 2, 4, 8])
    print(x.dtype)
    x = torch.tensor([1, 2, 4, 8])
    print(x.dtype)
    # force a specific dtype
    x = torch.tensor([1, 2, 4, 8], dtype=torch.float32)
    y = torch.tensor([2, 2, 2, 2])
    print(f'x + y: {x + y}')
    print(f'x - y: {x - y}')
    print(f'x * y: {x * y}')
    print(f'x / y: {x / y}')
    print(f'x ** y: {x ** y}')
    print(f'exp(x): {torch.exp(x)}')
    x = torch.arange(12, dtype=torch.float32).reshape(3, 4)
    y = torch.tensor([[2, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]],
                     dtype=torch.float32)
    # dim=0 stacks rows (6, 4); dim=1 stacks columns (3, 8)
    xy_row = torch.cat((x, y), dim=0)
    xy_col = torch.cat((x, y), dim=1)
    print(f'Row Cat: {xy_row}')
    print(f'Column Cat: {xy_col}')
    # elementwise comparison yields a boolean tensor of the same shape
    xy_equal = x == y
    print(f'Equal Boolean: {xy_equal}')
    x_sum = x.sum()
    print(f'x.sum: {x_sum}')
def c3_broadcast():
    """Chapter 3: broadcasting a (3, 1) tensor against a (1, 2) tensor."""
    col = torch.arange(3).reshape(3, -1)
    row = torch.arange(2).reshape(-1, 2)
    print(col)
    print(row)
    # both operands are expanded to (3, 2) before the addition
    print(f'a + b: {col + row}')
def c4_index_and_slice():
    """Chapter 4: tensor indexing, slicing, and in-place slice assignment."""
    x = torch.arange(12, dtype=torch.float32).reshape(3, 4)
    print(x)
    print(x[-1])
    print(x[1:3])
    # every second row
    print(x[0::2])
    # every second column
    print(x[:, 0::2])
    x[1, 2] = 9
    print(x)
    # scalar assignment broadcasts over the sliced view
    x[:, 0::2] = 0
    print(x)
    # a (3, 2) tensor assigned into the matching (3, 2) sliced view
    y = torch.arange(6).reshape(-1, 2)
    x[:, 0::2] = y
    print(x)
def c5_save_memory():
    """Chapter 5: in-place tensor writes that reuse an existing buffer."""
    a = torch.arange(12, dtype=torch.float32).reshape(3, 4)
    b = torch.arange(12, dtype=torch.float32).reshape(3, 4)
    result = torch.zeros_like(a)
    # slice assignment writes into result's existing storage
    result[:] = a + b
    print(result)
    result[:, :] = 0
    result[:] = a
    # += performs the addition in place, no new tensor is allocated
    result += b
    print(result)
def c6_into_python_object():
    """Chapter 6: converting between torch tensors, numpy arrays, and scalars."""
    t = torch.arange(12, dtype=torch.float32).reshape(3, 4)
    as_numpy = t.numpy()
    print(type(as_numpy))
    round_trip = torch.tensor(as_numpy)
    print(type(round_trip))
    # a one-element tensor converts to plain Python scalars
    scalar = torch.tensor([3.5])
    print(scalar)
    print(scalar.item())
    print(float(scalar))
    print(int(scalar))
# Run the selected chapter only when executed as a script, not on import.
if __name__ == "__main__":
    main()

26
d2l/pyproject.toml Normal file
View File

@@ -0,0 +1,26 @@
[project]
name = "learning"
version = "0.1.0"
description = "Exercises from the Dive into Deep Learning (D2L) course"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"datasets>=4.3.0",
"matplotlib>=3.10.7",
"numpy>=2.3.4",
"torch>=2.9.0",
"torchvision>=0.24.0",
]
[tool.uv.sources]
torch = [
{ index = "pytorch-cu126", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
]
torchvision = [
{ index = "pytorch-cu126", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
]
[[tool.uv.index]]
name = "pytorch-cu126"
url = "https://download.pytorch.org/whl/cu126"
explicit = true

2205
d2l/uv.lock generated Normal file

File diff suppressed because it is too large Load Diff