add a few lectures

This commit is contained in:
Frank Xu
2025-05-02 21:54:52 -04:00
parent 02953d8608
commit ac1c788fe9
22 changed files with 260097 additions and 201 deletions

View File

@@ -1,2 +1,4 @@
pip install ipywidgets
pip install scikit-learn
pip install scikit-learn
pip install ultralytics
pip install ultralytics opencv-python

View File

@@ -19,7 +19,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 62,
"id": "323b62e8",
"metadata": {},
"outputs": [
@@ -29,7 +29,7 @@
"6"
]
},
"execution_count": 2,
"execution_count": 62,
"metadata": {},
"output_type": "execute_result"
}
@@ -40,7 +40,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 63,
"id": "6cdd612e",
"metadata": {},
"outputs": [
@@ -50,7 +50,7 @@
"8"
]
},
"execution_count": 3,
"execution_count": 63,
"metadata": {},
"output_type": "execute_result"
}
@@ -61,7 +61,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 64,
"id": "5e7c9a36",
"metadata": {},
"outputs": [
@@ -71,7 +71,7 @@
"1"
]
},
"execution_count": 4,
"execution_count": 64,
"metadata": {},
"output_type": "execute_result"
}
@@ -82,7 +82,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 65,
"id": "c497160d",
"metadata": {},
"outputs": [
@@ -101,7 +101,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 66,
"id": "e81329e5",
"metadata": {},
"outputs": [
@@ -111,7 +111,7 @@
"[1, 2, 3]"
]
},
"execution_count": 6,
"execution_count": 66,
"metadata": {},
"output_type": "execute_result"
}
@@ -123,7 +123,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 67,
"id": "0f886f3a",
"metadata": {},
"outputs": [
@@ -133,7 +133,7 @@
"1"
]
},
"execution_count": 7,
"execution_count": 67,
"metadata": {},
"output_type": "execute_result"
}
@@ -144,7 +144,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 68,
"id": "bd694c5c",
"metadata": {},
"outputs": [
@@ -154,7 +154,7 @@
"[1, 2, 3, 4]"
]
},
"execution_count": 8,
"execution_count": 68,
"metadata": {},
"output_type": "execute_result"
}
@@ -166,7 +166,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 69,
"id": "f47c854d",
"metadata": {},
"outputs": [
@@ -176,7 +176,7 @@
"4"
]
},
"execution_count": 9,
"execution_count": 69,
"metadata": {},
"output_type": "execute_result"
}
@@ -187,7 +187,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 70,
"id": "e2d7b456",
"metadata": {},
"outputs": [
@@ -197,7 +197,7 @@
"(1, 2, 3)"
]
},
"execution_count": 10,
"execution_count": 70,
"metadata": {},
"output_type": "execute_result"
}
@@ -209,7 +209,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 71,
"id": "4e37998a",
"metadata": {},
"outputs": [
@@ -219,7 +219,7 @@
"1"
]
},
"execution_count": 11,
"execution_count": 71,
"metadata": {},
"output_type": "execute_result"
}
@@ -238,7 +238,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 72,
"id": "d9c0f925",
"metadata": {},
"outputs": [
@@ -248,7 +248,7 @@
"[1, 4, 9]"
]
},
"execution_count": 1,
"execution_count": 72,
"metadata": {},
"output_type": "execute_result"
}
@@ -259,7 +259,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 73,
"id": "799c1063",
"metadata": {},
"outputs": [
@@ -269,7 +269,7 @@
"[1, 4, 9, 16, 25]"
]
},
"execution_count": 4,
"execution_count": 73,
"metadata": {},
"output_type": "execute_result"
}
@@ -282,7 +282,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 74,
"id": "7a9d00ab",
"metadata": {},
"outputs": [
@@ -292,7 +292,7 @@
"[0, 2, 4, 6, 8]"
]
},
"execution_count": 6,
"execution_count": 74,
"metadata": {},
"output_type": "execute_result"
}
@@ -305,7 +305,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 75,
"id": "0b3a893b",
"metadata": {},
"outputs": [
@@ -315,7 +315,7 @@
"[1, 2]"
]
},
"execution_count": 8,
"execution_count": 75,
"metadata": {},
"output_type": "execute_result"
}
@@ -337,7 +337,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 76,
"id": "0df3abc9",
"metadata": {},
"outputs": [],
@@ -347,7 +347,7 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 77,
"id": "2a7a1daf",
"metadata": {},
"outputs": [
@@ -357,7 +357,7 @@
"array([1, 2, 3, 4, 5, 6])"
]
},
"execution_count": 13,
"execution_count": 77,
"metadata": {},
"output_type": "execute_result"
}
@@ -369,7 +369,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 78,
"id": "1f8e61c2",
"metadata": {},
"outputs": [
@@ -379,7 +379,7 @@
"np.int64(1)"
]
},
"execution_count": 14,
"execution_count": 78,
"metadata": {},
"output_type": "execute_result"
}
@@ -390,7 +390,7 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 79,
"id": "61e20947",
"metadata": {},
"outputs": [],
@@ -400,7 +400,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 80,
"id": "18251de4",
"metadata": {},
"outputs": [
@@ -410,7 +410,7 @@
"np.float32(2.0)"
]
},
"execution_count": 16,
"execution_count": 80,
"metadata": {},
"output_type": "execute_result"
}
@@ -421,7 +421,7 @@
},
{
"cell_type": "code",
"execution_count": 17,
"execution_count": 81,
"id": "11f35a16",
"metadata": {},
"outputs": [
@@ -431,7 +431,7 @@
"array([1, 2, 3])"
]
},
"execution_count": 17,
"execution_count": 81,
"metadata": {},
"output_type": "execute_result"
}
@@ -442,7 +442,7 @@
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 82,
"id": "57ef6d6c",
"metadata": {},
"outputs": [
@@ -452,7 +452,7 @@
"array([4, 5, 6])"
]
},
"execution_count": 18,
"execution_count": 82,
"metadata": {},
"output_type": "execute_result"
}
@@ -463,7 +463,7 @@
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": 83,
"id": "db3cc4e7",
"metadata": {},
"outputs": [
@@ -475,7 +475,7 @@
" [ 9, 10, 11, 12]])"
]
},
"execution_count": 19,
"execution_count": 83,
"metadata": {},
"output_type": "execute_result"
}
@@ -487,7 +487,7 @@
},
{
"cell_type": "code",
"execution_count": 20,
"execution_count": 84,
"id": "7ca7a696",
"metadata": {},
"outputs": [
@@ -497,7 +497,7 @@
"np.int64(8)"
]
},
"execution_count": 20,
"execution_count": 84,
"metadata": {},
"output_type": "execute_result"
}
@@ -506,6 +506,39 @@
"a[1, 3]"
]
},
{
"cell_type": "markdown",
"id": "3d2d0b6f",
"metadata": {},
"source": [
"The function calculates the step size as:\n",
"\n",
"step = (stop - start) / (num - 1) = (10 - (-10)) / (11 - 1) = 20/10 = 2"
]
},
{
"cell_type": "code",
"execution_count": 85,
"id": "53d75c62",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([-10., -8., -6., -4., -2., 0., 2., 4., 6., 8., 10.])"
]
},
"execution_count": 85,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Range of z values\n",
"# np.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0)\n",
"np.linspace(-10, 10, 11)"
]
},
{
"cell_type": "markdown",
"id": "0580eca2",
@@ -519,7 +552,7 @@
},
{
"cell_type": "code",
"execution_count": 21,
"execution_count": 86,
"id": "25a9b316",
"metadata": {},
"outputs": [
@@ -559,7 +592,7 @@
},
{
"cell_type": "code",
"execution_count": 22,
"execution_count": 87,
"id": "d86d32ee",
"metadata": {},
"outputs": [
@@ -615,7 +648,7 @@
},
{
"cell_type": "code",
"execution_count": 23,
"execution_count": 88,
"id": "ba1d7074",
"metadata": {},
"outputs": [
@@ -625,7 +658,7 @@
"True"
]
},
"execution_count": 23,
"execution_count": 88,
"metadata": {},
"output_type": "execute_result"
}
@@ -636,7 +669,7 @@
},
{
"cell_type": "code",
"execution_count": 24,
"execution_count": 89,
"id": "124e1812",
"metadata": {},
"outputs": [
@@ -646,7 +679,7 @@
"4"
]
},
"execution_count": 24,
"execution_count": 89,
"metadata": {},
"output_type": "execute_result"
}
@@ -658,7 +691,7 @@
},
{
"cell_type": "code",
"execution_count": 25,
"execution_count": 90,
"id": "f300cebf",
"metadata": {},
"outputs": [
@@ -668,7 +701,7 @@
"3"
]
},
"execution_count": 25,
"execution_count": 90,
"metadata": {},
"output_type": "execute_result"
}
@@ -686,7 +719,7 @@
},
{
"cell_type": "code",
"execution_count": 26,
"execution_count": 91,
"id": "6e9dceae",
"metadata": {},
"outputs": [
@@ -696,7 +729,7 @@
"(3, 2, 4)"
]
},
"execution_count": 26,
"execution_count": 91,
"metadata": {},
"output_type": "execute_result"
}
@@ -707,7 +740,7 @@
},
{
"cell_type": "code",
"execution_count": 27,
"execution_count": 92,
"id": "a5ee181d",
"metadata": {},
"outputs": [
@@ -717,7 +750,7 @@
"24"
]
},
"execution_count": 27,
"execution_count": 92,
"metadata": {},
"output_type": "execute_result"
}
@@ -728,7 +761,7 @@
},
{
"cell_type": "code",
"execution_count": 28,
"execution_count": 93,
"id": "b251a416",
"metadata": {},
"outputs": [
@@ -738,7 +771,7 @@
"dtype('int64')"
]
},
"execution_count": 28,
"execution_count": 93,
"metadata": {},
"output_type": "execute_result"
}
@@ -750,7 +783,7 @@
},
{
"cell_type": "code",
"execution_count": 29,
"execution_count": 94,
"id": "a043cd1c",
"metadata": {},
"outputs": [
@@ -760,7 +793,7 @@
"dtype('float32')"
]
},
"execution_count": 29,
"execution_count": 94,
"metadata": {},
"output_type": "execute_result"
}
@@ -780,7 +813,7 @@
},
{
"cell_type": "code",
"execution_count": 30,
"execution_count": 95,
"id": "75b22e31",
"metadata": {},
"outputs": [
@@ -790,7 +823,7 @@
"array([0., 0.])"
]
},
"execution_count": 30,
"execution_count": 95,
"metadata": {},
"output_type": "execute_result"
}
@@ -801,7 +834,7 @@
},
{
"cell_type": "code",
"execution_count": 31,
"execution_count": 96,
"id": "4b542c1f",
"metadata": {},
"outputs": [
@@ -811,7 +844,7 @@
"array([1., 1.])"
]
},
"execution_count": 31,
"execution_count": 96,
"metadata": {},
"output_type": "execute_result"
}
@@ -822,7 +855,7 @@
},
{
"cell_type": "code",
"execution_count": 32,
"execution_count": 97,
"id": "c22cdd54",
"metadata": {},
"outputs": [
@@ -832,7 +865,7 @@
"array([0, 1, 2, 3])"
]
},
"execution_count": 32,
"execution_count": 97,
"metadata": {},
"output_type": "execute_result"
}
@@ -843,7 +876,7 @@
},
{
"cell_type": "code",
"execution_count": 33,
"execution_count": 98,
"id": "367eacd6",
"metadata": {},
"outputs": [
@@ -853,7 +886,7 @@
"array([2, 4, 6, 8])"
]
},
"execution_count": 33,
"execution_count": 98,
"metadata": {},
"output_type": "execute_result"
}
@@ -864,19 +897,19 @@
},
{
"cell_type": "code",
"execution_count": 34,
"execution_count": 99,
"id": "0fa059e6",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[0.01135293, 0.54482248, 0.33628695, 0.81612888],\n",
" [0.00168462, 0.97332481, 0.86424808, 0.01566007],\n",
" [0.32076747, 0.3997841 , 0.24520416, 0.00826635]])"
"array([[0.84849457, 0.2408882 , 0.26897315, 0.88789736],\n",
" [0.17325996, 0.06966163, 0.81198917, 0.24941808],\n",
" [0.70722305, 0.64643472, 0.07015388, 0.19063198]])"
]
},
"execution_count": 34,
"execution_count": 99,
"metadata": {},
"output_type": "execute_result"
}
@@ -887,19 +920,19 @@
},
{
"cell_type": "code",
"execution_count": 35,
"execution_count": 100,
"id": "eae03b14",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[6, 6, 4, 5],\n",
" [8, 4, 1, 3],\n",
" [3, 4, 9, 6]], dtype=int32)"
"array([[ 1, 1, 10, 1],\n",
" [ 8, 10, 1, 1],\n",
" [ 8, 1, 4, 8]], dtype=int32)"
]
},
"execution_count": 35,
"execution_count": 100,
"metadata": {},
"output_type": "execute_result"
}
@@ -910,18 +943,18 @@
},
{
"cell_type": "code",
"execution_count": 36,
"execution_count": 101,
"id": "86c88e48",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 1.61548569, -1.27751798, 2.23823677, 1.87533372],\n",
" [ 1.88733913, 0.59567694, -0.74642328, 0.29124792]])"
"array([[-0.06960131, 0.94400631, -0.05980746, -0.81806564],\n",
" [-0.06661582, -0.71274669, 0.9149406 , 0.63540989]])"
]
},
"execution_count": 36,
"execution_count": 101,
"metadata": {},
"output_type": "execute_result"
}
@@ -944,7 +977,7 @@
},
{
"cell_type": "code",
"execution_count": 37,
"execution_count": 102,
"id": "b26242be",
"metadata": {},
"outputs": [
@@ -954,7 +987,7 @@
"(6,)"
]
},
"execution_count": 37,
"execution_count": 102,
"metadata": {},
"output_type": "execute_result"
}
@@ -966,7 +999,7 @@
},
{
"cell_type": "code",
"execution_count": 38,
"execution_count": 103,
"id": "18db087b",
"metadata": {},
"outputs": [
@@ -976,7 +1009,7 @@
"(1, 6)"
]
},
"execution_count": 38,
"execution_count": 103,
"metadata": {},
"output_type": "execute_result"
}
@@ -988,7 +1021,7 @@
},
{
"cell_type": "code",
"execution_count": 39,
"execution_count": 104,
"id": "f213bce6",
"metadata": {},
"outputs": [
@@ -998,7 +1031,7 @@
"array([[1, 2, 3, 4, 5, 6]])"
]
},
"execution_count": 39,
"execution_count": 104,
"metadata": {},
"output_type": "execute_result"
}
@@ -1009,7 +1042,7 @@
},
{
"cell_type": "code",
"execution_count": 40,
"execution_count": 105,
"id": "25c7b822",
"metadata": {},
"outputs": [
@@ -1024,7 +1057,7 @@
" [6]])"
]
},
"execution_count": 40,
"execution_count": 105,
"metadata": {},
"output_type": "execute_result"
}
@@ -1035,7 +1068,7 @@
},
{
"cell_type": "code",
"execution_count": 41,
"execution_count": 106,
"id": "bbd43b79",
"metadata": {},
"outputs": [
@@ -1046,7 +1079,7 @@
" [4, 5, 6]])"
]
},
"execution_count": 41,
"execution_count": 106,
"metadata": {},
"output_type": "execute_result"
}
@@ -1071,7 +1104,7 @@
},
{
"cell_type": "code",
"execution_count": 42,
"execution_count": 107,
"id": "9af72192",
"metadata": {},
"outputs": [
@@ -1082,7 +1115,7 @@
" [4, 5]])"
]
},
"execution_count": 42,
"execution_count": 107,
"metadata": {},
"output_type": "execute_result"
}
@@ -1098,7 +1131,7 @@
},
{
"cell_type": "code",
"execution_count": 43,
"execution_count": 108,
"id": "4216c512",
"metadata": {},
"outputs": [
@@ -1109,7 +1142,7 @@
" [ 4, 14]])"
]
},
"execution_count": 43,
"execution_count": 108,
"metadata": {},
"output_type": "execute_result"
}
@@ -1124,7 +1157,7 @@
},
{
"cell_type": "code",
"execution_count": 44,
"execution_count": 109,
"id": "0eec244e",
"metadata": {},
"outputs": [
@@ -1136,7 +1169,7 @@
" [6, 6]])"
]
},
"execution_count": 44,
"execution_count": 109,
"metadata": {},
"output_type": "execute_result"
}
@@ -1152,7 +1185,7 @@
},
{
"cell_type": "code",
"execution_count": 45,
"execution_count": 110,
"id": "bb8d128a",
"metadata": {},
"outputs": [
@@ -1162,7 +1195,7 @@
"array([1.6, 3.2])"
]
},
"execution_count": 45,
"execution_count": 110,
"metadata": {},
"output_type": "execute_result"
}
@@ -1179,7 +1212,7 @@
},
{
"cell_type": "code",
"execution_count": 46,
"execution_count": 111,
"id": "af55cce3",
"metadata": {},
"outputs": [
@@ -1191,7 +1224,7 @@
" [10, 20, 30]])"
]
},
"execution_count": 46,
"execution_count": 111,
"metadata": {},
"output_type": "execute_result"
}
@@ -1244,7 +1277,7 @@
},
{
"cell_type": "code",
"execution_count": 47,
"execution_count": 112,
"id": "0194c4b4",
"metadata": {},
"outputs": [
@@ -1254,7 +1287,7 @@
"np.int64(1)"
]
},
"execution_count": 47,
"execution_count": 112,
"metadata": {},
"output_type": "execute_result"
}
@@ -1267,7 +1300,7 @@
},
{
"cell_type": "code",
"execution_count": 48,
"execution_count": 113,
"id": "5b201131",
"metadata": {},
"outputs": [
@@ -1277,7 +1310,7 @@
"6"
]
},
"execution_count": 48,
"execution_count": 113,
"metadata": {},
"output_type": "execute_result"
}
@@ -1290,7 +1323,7 @@
},
{
"cell_type": "code",
"execution_count": 49,
"execution_count": 114,
"id": "e73fbd22",
"metadata": {},
"outputs": [
@@ -1300,7 +1333,7 @@
"array([5, 7])"
]
},
"execution_count": 49,
"execution_count": 114,
"metadata": {},
"output_type": "execute_result"
}
@@ -1314,7 +1347,7 @@
},
{
"cell_type": "code",
"execution_count": 50,
"execution_count": 115,
"id": "01229cfb",
"metadata": {},
"outputs": [],
@@ -1326,7 +1359,7 @@
},
{
"cell_type": "code",
"execution_count": 51,
"execution_count": 116,
"id": "fd882d6c",
"metadata": {},
"outputs": [
@@ -1336,7 +1369,7 @@
"np.float64(2.5)"
]
},
"execution_count": 51,
"execution_count": 116,
"metadata": {},
"output_type": "execute_result"
}
@@ -1348,7 +1381,7 @@
},
{
"cell_type": "code",
"execution_count": 52,
"execution_count": 117,
"id": "ba88c2f7",
"metadata": {},
"outputs": [
@@ -1358,7 +1391,7 @@
"array([2., 3.])"
]
},
"execution_count": 52,
"execution_count": 117,
"metadata": {},
"output_type": "execute_result"
}
@@ -1370,7 +1403,7 @@
},
{
"cell_type": "code",
"execution_count": 53,
"execution_count": 118,
"id": "b49ff53c",
"metadata": {},
"outputs": [
@@ -1380,7 +1413,7 @@
"array([1.5, 3.5])"
]
},
"execution_count": 53,
"execution_count": 118,
"metadata": {},
"output_type": "execute_result"
}
@@ -1392,7 +1425,7 @@
},
{
"cell_type": "code",
"execution_count": 54,
"execution_count": 119,
"id": "fa48f9a5",
"metadata": {},
"outputs": [
@@ -1402,7 +1435,7 @@
"array([1.5, 3.5])"
]
},
"execution_count": 54,
"execution_count": 119,
"metadata": {},
"output_type": "execute_result"
}
@@ -1415,7 +1448,7 @@
},
{
"cell_type": "code",
"execution_count": 55,
"execution_count": 120,
"id": "3ee7e4c9",
"metadata": {},
"outputs": [],
@@ -1440,7 +1473,7 @@
},
{
"cell_type": "code",
"execution_count": 56,
"execution_count": 121,
"id": "6d37c412",
"metadata": {},
"outputs": [
@@ -1476,7 +1509,7 @@
},
{
"cell_type": "code",
"execution_count": 57,
"execution_count": 122,
"id": "b0a7a95f",
"metadata": {},
"outputs": [
@@ -1510,7 +1543,7 @@
},
{
"cell_type": "code",
"execution_count": 58,
"execution_count": 123,
"id": "713cbe61",
"metadata": {},
"outputs": [
@@ -1553,7 +1586,7 @@
},
{
"cell_type": "code",
"execution_count": 59,
"execution_count": 124,
"id": "0ef952bd",
"metadata": {},
"outputs": [
@@ -1601,7 +1634,7 @@
},
{
"cell_type": "code",
"execution_count": 60,
"execution_count": 125,
"id": "5afe68f8",
"metadata": {},
"outputs": [
@@ -1634,7 +1667,7 @@
},
{
"cell_type": "code",
"execution_count": 61,
"execution_count": 126,
"id": "a1c381ad",
"metadata": {},
"outputs": [
@@ -1670,7 +1703,7 @@
},
{
"cell_type": "code",
"execution_count": 62,
"execution_count": 127,
"id": "5085fc72",
"metadata": {},
"outputs": [
@@ -1710,7 +1743,7 @@
},
{
"cell_type": "code",
"execution_count": 63,
"execution_count": 128,
"id": "37870aa6",
"metadata": {},
"outputs": [

View File

@@ -5,7 +5,7 @@
"id": "41d7e9ff",
"metadata": {},
"source": [
"### PyTorch Fundamentals Part A\n",
"## PyTorch Fundamentals Part A\n",
"\n",
"- A PyTorch tensor is a multi-dimensional array (0D to nD) that contains elements of a single data type (e.g., integers, floats). \n",
"- Tensors are used to represent scalars, vectors, matrices, or higher-dimensional data and are optimized for mathematical operations, automatic differentiation, and GPU computation"
@@ -13,7 +13,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 48,
"id": "739c5173",
"metadata": {},
"outputs": [
@@ -23,7 +23,7 @@
"'2.6.0+cu126'"
]
},
"execution_count": 2,
"execution_count": 48,
"metadata": {},
"output_type": "execute_result"
}
@@ -38,12 +38,12 @@
"id": "75acf7d8",
"metadata": {},
"source": [
"#### Multi-dimensional"
"### Multi-dimensional"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 49,
"id": "0e82be1e",
"metadata": {},
"outputs": [
@@ -53,7 +53,7 @@
"tensor(5)"
]
},
"execution_count": 3,
"execution_count": 49,
"metadata": {},
"output_type": "execute_result"
}
@@ -66,7 +66,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 50,
"id": "7c239759",
"metadata": {},
"outputs": [
@@ -76,7 +76,7 @@
"0"
]
},
"execution_count": 4,
"execution_count": 50,
"metadata": {},
"output_type": "execute_result"
}
@@ -87,7 +87,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 51,
"id": "d176548d",
"metadata": {},
"outputs": [
@@ -97,7 +97,7 @@
"torch.Size([])"
]
},
"execution_count": 5,
"execution_count": 51,
"metadata": {},
"output_type": "execute_result"
}
@@ -108,7 +108,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 52,
"id": "07e03145",
"metadata": {},
"outputs": [
@@ -118,7 +118,7 @@
"5"
]
},
"execution_count": 6,
"execution_count": 52,
"metadata": {},
"output_type": "execute_result"
}
@@ -129,7 +129,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 53,
"id": "41fcc46e",
"metadata": {},
"outputs": [
@@ -139,7 +139,7 @@
"tensor([1, 2, 3])"
]
},
"execution_count": 7,
"execution_count": 53,
"metadata": {},
"output_type": "execute_result"
}
@@ -152,7 +152,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 54,
"id": "f9894c37",
"metadata": {},
"outputs": [
@@ -162,7 +162,7 @@
"1"
]
},
"execution_count": 8,
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
@@ -173,7 +173,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 55,
"id": "7dc166eb",
"metadata": {},
"outputs": [
@@ -183,7 +183,7 @@
"torch.Size([3])"
]
},
"execution_count": 9,
"execution_count": 55,
"metadata": {},
"output_type": "execute_result"
}
@@ -195,7 +195,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 56,
"id": "2581817b",
"metadata": {},
"outputs": [
@@ -206,7 +206,7 @@
" [ 9, 10]])"
]
},
"execution_count": 10,
"execution_count": 56,
"metadata": {},
"output_type": "execute_result"
}
@@ -220,7 +220,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 57,
"id": "46961042",
"metadata": {},
"outputs": [
@@ -230,7 +230,7 @@
"2"
]
},
"execution_count": 11,
"execution_count": 57,
"metadata": {},
"output_type": "execute_result"
}
@@ -241,7 +241,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 58,
"id": "9669fda8",
"metadata": {},
"outputs": [
@@ -251,7 +251,7 @@
"torch.Size([2, 2])"
]
},
"execution_count": 12,
"execution_count": 58,
"metadata": {},
"output_type": "execute_result"
}
@@ -262,7 +262,7 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 59,
"id": "15297945",
"metadata": {},
"outputs": [
@@ -274,7 +274,7 @@
" [2, 4, 5]]])"
]
},
"execution_count": 13,
"execution_count": 59,
"metadata": {},
"output_type": "execute_result"
}
@@ -289,7 +289,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 60,
"id": "5bbed071",
"metadata": {},
"outputs": [
@@ -299,7 +299,7 @@
"3"
]
},
"execution_count": 14,
"execution_count": 60,
"metadata": {},
"output_type": "execute_result"
}
@@ -310,7 +310,7 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 61,
"id": "483d25c7",
"metadata": {},
"outputs": [
@@ -320,7 +320,7 @@
"torch.Size([1, 3, 3])"
]
},
"execution_count": 15,
"execution_count": 61,
"metadata": {},
"output_type": "execute_result"
}
@@ -331,7 +331,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 62,
"id": "c4e76ef2",
"metadata": {},
"outputs": [
@@ -341,7 +341,7 @@
"torch.Size([1, 3, 3])"
]
},
"execution_count": 16,
"execution_count": 62,
"metadata": {},
"output_type": "execute_result"
}
@@ -352,7 +352,7 @@
},
{
"cell_type": "code",
"execution_count": 34,
"execution_count": 63,
"id": "b56abf50",
"metadata": {},
"outputs": [
@@ -364,7 +364,7 @@
" [6, 9]])"
]
},
"execution_count": 34,
"execution_count": 63,
"metadata": {},
"output_type": "execute_result"
}
@@ -376,7 +376,7 @@
},
{
"cell_type": "code",
"execution_count": 35,
"execution_count": 64,
"id": "cdd39ae8",
"metadata": {},
"outputs": [
@@ -391,7 +391,7 @@
" [9]])"
]
},
"execution_count": 35,
"execution_count": 64,
"metadata": {},
"output_type": "execute_result"
}
@@ -403,7 +403,7 @@
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 65,
"id": "adf1ab41",
"metadata": {},
"outputs": [
@@ -415,7 +415,7 @@
" [2., 4., 5.]]])"
]
},
"execution_count": 18,
"execution_count": 65,
"metadata": {},
"output_type": "execute_result"
}
@@ -430,7 +430,7 @@
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": 66,
"id": "a368079f",
"metadata": {},
"outputs": [
@@ -440,7 +440,7 @@
"torch.float32"
]
},
"execution_count": 19,
"execution_count": 66,
"metadata": {},
"output_type": "execute_result"
}
@@ -451,20 +451,20 @@
},
{
"cell_type": "code",
"execution_count": 20,
"execution_count": 67,
"id": "4d00ea95",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(tensor([[0.7310, 0.5572],\n",
" [0.9469, 0.2378]]),\n",
" tensor([[0.2700, 0.9798],\n",
" [0.4980, 0.8848]]))"
"(tensor([[0.6636, 0.4190],\n",
" [0.4294, 0.9632]]),\n",
" tensor([[0.0473, 0.9045],\n",
" [0.2971, 0.3203]]))"
]
},
"execution_count": 20,
"execution_count": 67,
"metadata": {},
"output_type": "execute_result"
}
@@ -477,15 +477,266 @@
},
{
"cell_type": "markdown",
"id": "02a00747",
"id": "3852bb53",
"metadata": {},
"source": [
"#### Operation"
"### Slicing"
]
},
{
"cell_type": "code",
"execution_count": 21,
"execution_count": 68,
"id": "aeed7a0a",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([1, 2])"
]
},
"execution_count": 68,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x=torch.tensor([1, 2, 3, 4, 5, 6])\n",
"x[0:2]"
]
},
{
"cell_type": "code",
"execution_count": 69,
"id": "721ce7eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([1, 2])"
]
},
"execution_count": 69,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x=torch.tensor([1, 2, 3, 4, 5, 6])\n",
"x[:2]"
]
},
{
"cell_type": "code",
"execution_count": 70,
"id": "6423f4d2",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor(6)"
]
},
"execution_count": 70,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x=torch.tensor([1, 2, 3, 4, 5, 6])\n",
"x[-1]"
]
},
{
"cell_type": "code",
"execution_count": 71,
"id": "0125386f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([3, 4, 5, 6])"
]
},
"execution_count": 71,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x=torch.tensor([1, 2, 3, 4, 5, 6])\n",
"x[2:]"
]
},
{
"cell_type": "code",
"execution_count": 72,
"id": "97373387",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[1, 2],\n",
" [4, 5]])"
]
},
"execution_count": 72,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Example 2D tensor\n",
"x = torch.tensor([[1, 2, 3],\n",
" [4, 5, 6],\n",
" [7, 8, 9]])\n",
"\n",
"# Syntax: x[row_slice, column_slice]\n",
"\n",
"# Slice the first two rows and the first two columns\n",
"x[:2, :2] # tensor([[1, 2],\n",
" # [4, 5]])"
]
},
{
"cell_type": "code",
"execution_count": 73,
"id": "bba6b1b4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([4, 5, 6])"
]
},
"execution_count": 73,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Slice the second row\n",
"x[1, :] # tensor([4, 5, 6])"
]
},
{
"cell_type": "code",
"execution_count": 74,
"id": "12a96c84",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([3, 6, 9])"
]
},
"execution_count": 74,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Slice the third column\n",
"x[:, 2] # tensor([3, 6, 9])"
]
},
{
"cell_type": "code",
"execution_count": 75,
"id": "a0f73c88",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([[5, 6]])"
]
},
"execution_count": 75,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Example 2D tensor\n",
"x = torch.tensor([[1, 2, 3],\n",
" [4, 5, 6],\n",
" [7, 8, 9]])\n",
"\n",
"# Get a submatrix from row 1 to 2 (exclusive of 2), and column 1 to 3\n",
"x[1:2, 1:3] # tensor([[5, 6]])"
]
},
{
"cell_type": "markdown",
"id": "a3c1d8b5",
"metadata": {},
"source": [
"### sum, mean"
]
},
{
"cell_type": "code",
"execution_count": 76,
"id": "da5391eb",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([False, False, True, True])"
]
},
"execution_count": 76,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"a=torch.tensor([0, 1, 1, 1])\n",
"b=torch.tensor([1, 0, 1, 1])\n",
"a.eq(b)"
]
},
{
"cell_type": "code",
"execution_count": 77,
"id": "78ed8e4b",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor(2)"
]
},
"execution_count": 77,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# convert tensor([False, False, True, True]) to [0, 0, 1, 1], then sum the matches\n",
"a.eq(b).sum()"
]
},
{
"cell_type": "markdown",
"id": "02a00747",
"metadata": {},
"source": [
"### Operation"
]
},
{
"cell_type": "code",
"execution_count": 78,
"id": "45267f2f",
"metadata": {},
"outputs": [
@@ -498,7 +749,7 @@
" [7, 8]]))"
]
},
"execution_count": 21,
"execution_count": 78,
"metadata": {},
"output_type": "execute_result"
}
@@ -513,7 +764,7 @@
},
{
"cell_type": "code",
"execution_count": 22,
"execution_count": 79,
"id": "193a7828",
"metadata": {},
"outputs": [
@@ -524,7 +775,7 @@
" [10, 12]])"
]
},
"execution_count": 22,
"execution_count": 79,
"metadata": {},
"output_type": "execute_result"
}
@@ -535,7 +786,7 @@
},
{
"cell_type": "code",
"execution_count": 23,
"execution_count": 80,
"id": "1ce81689",
"metadata": {},
"outputs": [
@@ -546,7 +797,7 @@
" [21, 32]])"
]
},
"execution_count": 23,
"execution_count": 80,
"metadata": {},
"output_type": "execute_result"
}
@@ -558,7 +809,7 @@
},
{
"cell_type": "code",
"execution_count": 24,
"execution_count": 81,
"id": "62f8cde3",
"metadata": {},
"outputs": [
@@ -568,7 +819,7 @@
"tensor([11, 12, 13])"
]
},
"execution_count": 24,
"execution_count": 81,
"metadata": {},
"output_type": "execute_result"
}
@@ -583,7 +834,7 @@
},
{
"cell_type": "code",
"execution_count": 25,
"execution_count": 82,
"id": "2098ad78",
"metadata": {},
"outputs": [
@@ -594,7 +845,7 @@
" [4, 5, 6]])"
]
},
"execution_count": 25,
"execution_count": 82,
"metadata": {},
"output_type": "execute_result"
}
@@ -607,7 +858,7 @@
},
{
"cell_type": "code",
"execution_count": 26,
"execution_count": 83,
"id": "883321f8",
"metadata": {},
"outputs": [
@@ -619,7 +870,7 @@
" [5, 6]])"
]
},
"execution_count": 26,
"execution_count": 83,
"metadata": {},
"output_type": "execute_result"
}
@@ -633,7 +884,7 @@
"id": "9d716eb9",
"metadata": {},
"source": [
"### Comparison to NumPy Arrays\n",
"## Comparison to NumPy Arrays\n",
"Tensors are similar to NumPy arrays but add:\n",
"- GPU support.\n",
"- Automatic differentiation (requires_grad).\n",
@@ -642,7 +893,7 @@
},
{
"cell_type": "code",
"execution_count": 27,
"execution_count": 84,
"id": "2a3fd4ae",
"metadata": {},
"outputs": [
@@ -652,7 +903,7 @@
"tensor([1, 2, 3])"
]
},
"execution_count": 27,
"execution_count": 84,
"metadata": {},
"output_type": "execute_result"
}
@@ -665,7 +916,7 @@
},
{
"cell_type": "code",
"execution_count": 28,
"execution_count": 85,
"id": "df247bd3",
"metadata": {},
"outputs": [
@@ -675,7 +926,7 @@
"array([1, 2, 3])"
]
},
"execution_count": 28,
"execution_count": 85,
"metadata": {},
"output_type": "execute_result"
}
@@ -687,7 +938,7 @@
},
{
"cell_type": "code",
"execution_count": 29,
"execution_count": 86,
"id": "9ada07ab",
"metadata": {},
"outputs": [
@@ -697,7 +948,7 @@
"tensor([1, 2, 3])"
]
},
"execution_count": 29,
"execution_count": 86,
"metadata": {},
"output_type": "execute_result"
}
@@ -717,7 +968,7 @@
},
{
"cell_type": "code",
"execution_count": 30,
"execution_count": 87,
"id": "30c9ea9f",
"metadata": {},
"outputs": [
@@ -727,7 +978,7 @@
"True"
]
},
"execution_count": 30,
"execution_count": 87,
"metadata": {},
"output_type": "execute_result"
}
@@ -740,7 +991,7 @@
},
{
"cell_type": "code",
"execution_count": 31,
"execution_count": 88,
"id": "dd523b3e",
"metadata": {},
"outputs": [
@@ -750,7 +1001,7 @@
"'cuda'"
]
},
"execution_count": 31,
"execution_count": 88,
"metadata": {},
"output_type": "execute_result"
}
@@ -763,7 +1014,7 @@
},
{
"cell_type": "code",
"execution_count": 32,
"execution_count": 89,
"id": "11d1a029",
"metadata": {},
"outputs": [
@@ -780,7 +1031,7 @@
"tensor([1, 2, 3], device='cuda:0')"
]
},
"execution_count": 32,
"execution_count": 89,
"metadata": {},
"output_type": "execute_result"
}
@@ -799,7 +1050,7 @@
},
{
"cell_type": "code",
"execution_count": 33,
"execution_count": 90,
"id": "db5249d0",
"metadata": {},
"outputs": [
@@ -807,7 +1058,7 @@
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\Weife\\AppData\\Local\\Temp\\ipykernel_54156\\3540074575.py:6: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
"C:\\Users\\Weife\\AppData\\Local\\Temp\\ipykernel_111340\\3540074575.py:6: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
" y = torch.tensor(x, device=device) # directly create a tensor on GPU\n"
]
}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

View File

@@ -0,0 +1,484 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "31ee256c",
"metadata": {},
"source": [
"## Breast cancer prediction"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "53af081c",
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn as nn\n",
"import numpy as np\n",
"from sklearn.datasets import load_breast_cancer\n",
"from sklearn.preprocessing import StandardScaler\n",
"from sklearn.model_selection import train_test_split"
]
},
{
"cell_type": "markdown",
"id": "536078f0",
"metadata": {},
"source": [
"### Load and preprocess breast cancer dataset"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "06746e3c",
"metadata": {},
"outputs": [],
"source": [
"\"\"\"Load and preprocess breast cancer dataset.\"\"\"\n",
"# Load dataset\n",
"data = load_breast_cancer()\n",
"X, y = data.data, data.target"
]
},
{
"cell_type": "markdown",
"id": "3477485c",
"metadata": {},
"source": [
"### Understand inputs"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "76d4d576",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(569, 30)"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X.shape"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "fddcc037",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([1.799e+01, 1.038e+01, 1.228e+02, 1.001e+03, 1.184e-01, 2.776e-01,\n",
" 3.001e-01, 1.471e-01, 2.419e-01, 7.871e-02, 1.095e+00, 9.053e-01,\n",
" 8.589e+00, 1.534e+02, 6.399e-03, 4.904e-02, 5.373e-02, 1.587e-02,\n",
" 3.003e-02, 6.193e-03, 2.538e+01, 1.733e+01, 1.846e+02, 2.019e+03,\n",
" 1.622e-01, 6.656e-01, 7.119e-01, 2.654e-01, 4.601e-01, 1.189e-01])"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X[0, :]"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "070dcd69",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(569,)"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"y.shape"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "c4632c29",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"np.int64(0)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"y[0]"
]
},
{
"cell_type": "markdown",
"id": "b74373cb",
"metadata": {},
"source": [
"### Split dataset into training and testing"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "0675a8c7",
"metadata": {},
"outputs": [],
"source": [
"X_train, X_test, y_train, y_test = train_test_split(\n",
" X, y, test_size=0.2, random_state=1234\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "bfe70bd9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(455, 30)"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_train.shape"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "a4df0052",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(114, 30)"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_test.shape"
]
},
{
"cell_type": "markdown",
"id": "d597a997",
"metadata": {},
"source": [
"### Scale features\n",
"Scaling features, as done in the code with StandardScaler, transforms the input data so that each feature has a mean of 0 and a standard deviation of 1. This is also known as standardization. The purpose of scaling features in this context is to:\n",
"\n",
"- Improve Model Convergence: Many machine learning algorithms, including neural networks optimized with gradient-based methods like SGD, converge faster when features are on a similar scale. Unscaled features with different ranges can cause gradients to vary widely, slowing down or destabilizing training.\n",
"- Ensure Fair Feature Influence: Features with larger numerical ranges could disproportionately influence the model compared to features with smaller ranges. Standardization ensures all features contribute equally to the model's predictions.\n",
"- Enhance Numerical Stability: Large or highly variable feature values can lead to numerical instability in computations, especially in deep learning frameworks like PyTorch. Scaling mitigates this risk."
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "3aeb88da",
"metadata": {},
"outputs": [],
"source": [
"# Scale features\n",
"scaler = StandardScaler()\n",
"X_train = scaler.fit_transform(X_train)\n",
"X_test = scaler.transform(X_test)\n",
"\n",
"# Convert to PyTorch tensors\n",
"X_train = torch.from_numpy(X_train.astype(np.float32))\n",
"X_test = torch.from_numpy(X_test.astype(np.float32))\n",
"y_train = torch.from_numpy(y_train.astype(np.float32)).view(-1, 1)\n",
"y_test = torch.from_numpy(y_test.astype(np.float32)).view(-1, 1)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "3b10079f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([455, 30])"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_train.shape"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "13f4059c",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor([-0.3618, -0.2652, -0.3172, -0.4671, 1.8038, 1.1817, -0.5169, 0.1065,\n",
" -0.3901, 1.3914, 0.1437, -0.1208, 0.1601, -0.1326, -0.5863, -0.1248,\n",
" -0.5787, 0.1091, -0.2819, -0.1889, -0.2571, -0.2403, -0.2442, -0.3669,\n",
" 0.5449, 0.2481, -0.7109, -0.0797, -0.5280, 0.2506])"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_train[0,:]"
]
},
{
"cell_type": "markdown",
"id": "b0b15d2f",
"metadata": {},
"source": [
"### Binary Classifier model"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "e1b50a04",
"metadata": {},
"outputs": [],
"source": [
"class BinaryClassifier(nn.Module):\n",
" \"\"\"Simple neural network for binary classification.\"\"\"\n",
" def __init__(self, input_features):\n",
" super(BinaryClassifier, self).__init__()\n",
" self.linear = nn.Linear(input_features, 1)\n",
" \n",
" def forward(self, x):\n",
" return torch.sigmoid(self.linear(x))"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "49694959",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"torch.Size([455, 30])"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_train.shape"
]
},
{
"cell_type": "markdown",
"id": "14873622",
"metadata": {},
"source": [
"### Show binary classification model\n",
"- the number of input features\n",
"- the number of output features"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "466f6c41",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"BinaryClassifier(\n",
" (linear): Linear(in_features=30, out_features=1, bias=True)\n",
")"
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"n_features = X_train.shape[1]\n",
"model = BinaryClassifier(n_features)\n",
"model"
]
},
{
"cell_type": "markdown",
"id": "c66978b5",
"metadata": {},
"source": [
"### Train the model with given parameters.\n",
"\n",
"- forward pass: prediction\n",
"- loss: error\n",
"- autograd: weight change direction\n",
"- stochastic gradient descent (optimizer): update weights\n",
"- optimizer.zero_grad()"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "1d1d7868",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch [10/100], Loss: 0.4627\n",
"Epoch [20/100], Loss: 0.4105\n",
"Epoch [30/100], Loss: 0.3721\n",
"Epoch [40/100], Loss: 0.3424\n",
"Epoch [50/100], Loss: 0.3186\n",
"Epoch [60/100], Loss: 0.2990\n",
"Epoch [70/100], Loss: 0.2825\n",
"Epoch [80/100], Loss: 0.2683\n",
"Epoch [90/100], Loss: 0.2560\n",
"Epoch [100/100], Loss: 0.2452\n"
]
}
],
"source": [
"num_epochs=100\n",
"learning_rate=0.01\n",
"\n",
"\"\"\"Train the model with given parameters.\"\"\"\n",
"criterion = nn.BCELoss()\n",
"optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)\n",
"\n",
"for epoch in range(num_epochs):\n",
" # Forward pass\n",
" y_pred = model(X_train)\n",
" loss = criterion(y_pred, y_train)\n",
" \n",
" # Backward pass and optimization\n",
" optimizer.zero_grad()\n",
" loss.backward()\n",
" optimizer.step()\n",
" \n",
" # Log progress\n",
" if (epoch + 1) % 10 == 0:\n",
" print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}')\n"
]
},
{
"cell_type": "markdown",
"id": "1a59248d",
"metadata": {},
"source": [
"### Evaluate model performance on test set"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "eeddd812",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Test Accuracy: 0.8947\n"
]
}
],
"source": [
"with torch.no_grad():\n",
" y_pred = model(X_test)\n",
" y_pred_classes = y_pred.round() # Values 𝑥 ≥ 0.5 are rounded to 1, else 0\n",
" accuracy = y_pred_classes.eq(y_test).sum() / float(y_test.shape[0])\n",
" print(f'\\nTest Accuracy: {accuracy:.4f}')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1dc4fcd3",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.2"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,146 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"id": "53af081c",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Training model...\n",
"Epoch [10/100], Loss: 0.6247\n",
"Epoch [20/100], Loss: 0.4940\n",
"Epoch [30/100], Loss: 0.4156\n",
"Epoch [40/100], Loss: 0.3641\n",
"Epoch [50/100], Loss: 0.3277\n",
"Epoch [60/100], Loss: 0.3005\n",
"Epoch [70/100], Loss: 0.2794\n",
"Epoch [80/100], Loss: 0.2624\n",
"Epoch [90/100], Loss: 0.2483\n",
"Epoch [100/100], Loss: 0.2364\n",
"\n",
"Test Accuracy: 0.9211\n"
]
}
],
"source": [
"import torch\n",
"import torch.nn as nn\n",
"import numpy as np\n",
"from sklearn.datasets import load_breast_cancer\n",
"from sklearn.preprocessing import StandardScaler\n",
"from sklearn.model_selection import train_test_split\n",
"\n",
"def prepare_data():\n",
" \"\"\"Load and preprocess breast cancer dataset.\"\"\"\n",
" # Load dataset\n",
" data = load_breast_cancer()\n",
" X, y = data.data, data.target\n",
" \n",
" # Split dataset\n",
" X_train, X_test, y_train, y_test = train_test_split(\n",
" X, y, test_size=0.2, random_state=1234\n",
" )\n",
" \n",
" # Scale features\n",
" scaler = StandardScaler()\n",
" X_train = scaler.fit_transform(X_train)\n",
" X_test = scaler.transform(X_test)\n",
" \n",
" # Convert to PyTorch tensors\n",
" X_train = torch.from_numpy(X_train.astype(np.float32))\n",
" X_test = torch.from_numpy(X_test.astype(np.float32))\n",
" y_train = torch.from_numpy(y_train.astype(np.float32)).view(-1, 1)\n",
" y_test = torch.from_numpy(y_test.astype(np.float32)).view(-1, 1)\n",
" \n",
" return X_train, X_test, y_train, y_test\n",
"\n",
"class BinaryClassifier(nn.Module):\n",
" \"\"\"Simple neural network for binary classification.\"\"\"\n",
" def __init__(self, input_features):\n",
" super(BinaryClassifier, self).__init__()\n",
" self.linear = nn.Linear(input_features, 1)\n",
" \n",
" def forward(self, x):\n",
" return torch.sigmoid(self.linear(x))\n",
"\n",
"def train_model(model, X_train, y_train, num_epochs=100, learning_rate=0.01):\n",
" \"\"\"Train the model with given parameters.\"\"\"\n",
" criterion = nn.BCELoss()\n",
" optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)\n",
" \n",
" for epoch in range(num_epochs):\n",
" # Forward pass\n",
" y_pred = model(X_train)\n",
" loss = criterion(y_pred, y_train)\n",
" \n",
" # Backward pass and optimization\n",
" optimizer.zero_grad()\n",
" loss.backward()\n",
" optimizer.step()\n",
" \n",
" # Log progress\n",
" if (epoch + 1) % 10 == 0:\n",
" print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}')\n",
"\n",
"def evaluate_model(model, X_test, y_test):\n",
" \"\"\"Evaluate model performance on test set.\"\"\"\n",
" with torch.no_grad():\n",
" y_pred = model(X_test)\n",
" y_pred_classes = y_pred.round()\n",
" accuracy = y_pred_classes.eq(y_test).sum() / float(y_test.shape[0])\n",
" return accuracy.item()\n",
"\n",
"def main():\n",
" # Prepare data\n",
" X_train, X_test, y_train, y_test = prepare_data()\n",
" \n",
" # Initialize model\n",
" n_features = X_train.shape[1]\n",
" model = BinaryClassifier(n_features)\n",
" \n",
" # Train model\n",
" print(\"Training model...\")\n",
" train_model(model, X_train, y_train)\n",
" \n",
" # Evaluate model\n",
" accuracy = evaluate_model(model, X_test, y_test)\n",
" print(f'\\nTest Accuracy: {accuracy:.4f}')\n",
"\n",
"main()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76d4d576",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.2"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,188 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "52950b67",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"First sample - Features: tensor([1.4230e+01, 1.7100e+00, 2.4300e+00, 1.5600e+01, 1.2700e+02, 2.8000e+00,\n",
" 3.0600e+00, 2.8000e-01, 2.2900e+00, 5.6400e+00, 1.0400e+00, 3.9200e+00,\n",
" 1.0650e+03]), Label: tensor([1.])\n",
"Sample batch - Features: torch.Size([4, 13]), Labels: torch.Size([4, 1])\n",
"Total samples: 178, Iterations per epoch: 45\n",
"Epoch: 1/2, Step 5/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 10/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 15/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 20/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 25/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 30/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 35/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 40/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 1/2, Step 45/45 | Inputs torch.Size([2, 13]) | Labels torch.Size([2, 1])\n",
"Epoch: 2/2, Step 5/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 10/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 15/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 20/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 25/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 30/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 35/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 40/45 | Inputs torch.Size([4, 13]) | Labels torch.Size([4, 1])\n",
"Epoch: 2/2, Step 45/45 | Inputs torch.Size([2, 13]) | Labels torch.Size([2, 1])\n"
]
}
],
"source": [
"import torch\n",
"import torchvision\n",
"from torch.utils.data import Dataset, DataLoader\n",
"import numpy as np\n",
"import math\n",
"\n",
"# Custom Dataset class for Wine dataset\n",
"class WineDataset(Dataset):\n",
" def __init__(self, data_path='data/wine.csv'):\n",
" \"\"\"\n",
" Initialize the dataset by loading wine data from a CSV file.\n",
" \n",
" Args:\n",
" data_path (str): Path to the wine CSV file\n",
" \"\"\"\n",
" # Load data from CSV, skipping header row\n",
" xy = np.loadtxt(data_path, delimiter=',', dtype=np.float32, skiprows=1)\n",
" self.n_samples = xy.shape[0]\n",
" \n",
" # Split into features (all columns except first) and labels (first column)\n",
" self.x_data = torch.from_numpy(xy[:, 1:]) # Shape: [n_samples, n_features]\n",
" self.y_data = torch.from_numpy(xy[:, [0]]) # Shape: [n_samples, 1]\n",
"\n",
" def __getitem__(self, index):\n",
" \"\"\"\n",
" Enable indexing to retrieve a specific sample.\n",
" \n",
" Args:\n",
" index (int): Index of the sample to retrieve\n",
" \n",
" Returns:\n",
" tuple: (features, label) for the specified index\n",
" \"\"\"\n",
" return self.x_data[index], self.y_data[index]\n",
"\n",
" def __len__(self):\n",
" \"\"\"\n",
" Return the total number of samples in the dataset.\n",
" \n",
" Returns:\n",
" int: Number of samples\n",
" \"\"\"\n",
" return self.n_samples\n",
"\n",
"# Create dataset instance\n",
"dataset = WineDataset()\n",
"\n",
"# Access and print first sample\n",
"features, labels = dataset[0]\n",
"print(f\"First sample - Features: {features}, Label: {labels}\")\n",
"\n",
"\"\"\"\n",
"Create a DataLoader for the wine dataset.\n",
"\n",
"Args:\n",
" dataset (Dataset): The dataset to load\n",
" batch_size (int): Number of samples per batch\n",
" shuffle (bool): Whether to shuffle the data\n",
" num_workers (int): Number of subprocesses for data loading\n",
" \n",
"Returns:\n",
" DataLoader: Configured DataLoader instance\n",
"\"\"\"\n",
"train_loader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=0)\n",
"\n",
"# Examine one batch\n",
"dataiter = iter(train_loader)\n",
"features, labels = next(dataiter)\n",
"print(f\"Sample batch - Features: {features.shape}, Labels: {labels.shape}\")\n",
"\n",
"# Training loop parameters\n",
"num_epochs = 2\n",
"total_samples = len(dataset)\n",
"n_iterations = math.ceil(total_samples / 4)\n",
"print(f\"Total samples: {total_samples}, Iterations per epoch: {n_iterations}\")\n",
"\n",
"# Dummy training loop\n",
"for epoch in range(num_epochs):\n",
" for i, (inputs, labels) in enumerate(train_loader):\n",
" # Training step\n",
" if (i + 1) % 5 == 0:\n",
" print(f'Epoch: {epoch+1}/{num_epochs}, Step {i+1}/{n_iterations} | '\n",
" f'Inputs {inputs.shape} | Labels {labels.shape}')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "37095d28",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|██████████| 9.91M/9.91M [00:02<00:00, 4.92MB/s]\n",
"100%|██████████| 28.9k/28.9k [00:00<00:00, 3.21MB/s]\n",
"100%|██████████| 1.65M/1.65M [00:00<00:00, 9.59MB/s]\n",
"100%|██████████| 4.54k/4.54k [00:00<00:00, 9.73MB/s]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"MNIST batch - Inputs: torch.Size([3, 1, 28, 28]), Targets: torch.Size([3])\n"
]
}
],
"source": [
"# Example with MNIST dataset\n",
"train_dataset = torchvision.datasets.MNIST(root='./data',\n",
" train=True,\n",
" transform=torchvision.transforms.ToTensor(),\n",
" download=True)\n",
"\n",
"mnist_loader = DataLoader(dataset=train_dataset,\n",
" batch_size=3,\n",
" shuffle=True)\n",
"\n",
"# Examine MNIST batch\n",
"dataiter = iter(mnist_loader)\n",
"inputs, targets = next(dataiter)\n",
"print(f\"MNIST batch - Inputs: {inputs.shape}, Targets: {targets.shape}\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.2"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,179 @@
Wine,Alcohol,Malic.acid,Ash,Acl,Mg,Phenols,Flavanoids,Nonflavanoid.phenols,Proanth,Color.int,Hue,OD,Proline
1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065
1,13.2,1.78,2.14,11.2,100,2.65,2.76,.26,1.28,4.38,1.05,3.4,1050
1,13.16,2.36,2.67,18.6,101,2.8,3.24,.3,2.81,5.68,1.03,3.17,1185
1,14.37,1.95,2.5,16.8,113,3.85,3.49,.24,2.18,7.8,.86,3.45,1480
1,13.24,2.59,2.87,21,118,2.8,2.69,.39,1.82,4.32,1.04,2.93,735
1,14.2,1.76,2.45,15.2,112,3.27,3.39,.34,1.97,6.75,1.05,2.85,1450
1,14.39,1.87,2.45,14.6,96,2.5,2.52,.3,1.98,5.25,1.02,3.58,1290
1,14.06,2.15,2.61,17.6,121,2.6,2.51,.31,1.25,5.05,1.06,3.58,1295
1,14.83,1.64,2.17,14,97,2.8,2.98,.29,1.98,5.2,1.08,2.85,1045
1,13.86,1.35,2.27,16,98,2.98,3.15,.22,1.85,7.22,1.01,3.55,1045
1,14.1,2.16,2.3,18,105,2.95,3.32,.22,2.38,5.75,1.25,3.17,1510
1,14.12,1.48,2.32,16.8,95,2.2,2.43,.26,1.57,5,1.17,2.82,1280
1,13.75,1.73,2.41,16,89,2.6,2.76,.29,1.81,5.6,1.15,2.9,1320
1,14.75,1.73,2.39,11.4,91,3.1,3.69,.43,2.81,5.4,1.25,2.73,1150
1,14.38,1.87,2.38,12,102,3.3,3.64,.29,2.96,7.5,1.2,3,1547
1,13.63,1.81,2.7,17.2,112,2.85,2.91,.3,1.46,7.3,1.28,2.88,1310
1,14.3,1.92,2.72,20,120,2.8,3.14,.33,1.97,6.2,1.07,2.65,1280
1,13.83,1.57,2.62,20,115,2.95,3.4,.4,1.72,6.6,1.13,2.57,1130
1,14.19,1.59,2.48,16.5,108,3.3,3.93,.32,1.86,8.7,1.23,2.82,1680
1,13.64,3.1,2.56,15.2,116,2.7,3.03,.17,1.66,5.1,.96,3.36,845
1,14.06,1.63,2.28,16,126,3,3.17,.24,2.1,5.65,1.09,3.71,780
1,12.93,3.8,2.65,18.6,102,2.41,2.41,.25,1.98,4.5,1.03,3.52,770
1,13.71,1.86,2.36,16.6,101,2.61,2.88,.27,1.69,3.8,1.11,4,1035
1,12.85,1.6,2.52,17.8,95,2.48,2.37,.26,1.46,3.93,1.09,3.63,1015
1,13.5,1.81,2.61,20,96,2.53,2.61,.28,1.66,3.52,1.12,3.82,845
1,13.05,2.05,3.22,25,124,2.63,2.68,.47,1.92,3.58,1.13,3.2,830
1,13.39,1.77,2.62,16.1,93,2.85,2.94,.34,1.45,4.8,.92,3.22,1195
1,13.3,1.72,2.14,17,94,2.4,2.19,.27,1.35,3.95,1.02,2.77,1285
1,13.87,1.9,2.8,19.4,107,2.95,2.97,.37,1.76,4.5,1.25,3.4,915
1,14.02,1.68,2.21,16,96,2.65,2.33,.26,1.98,4.7,1.04,3.59,1035
1,13.73,1.5,2.7,22.5,101,3,3.25,.29,2.38,5.7,1.19,2.71,1285
1,13.58,1.66,2.36,19.1,106,2.86,3.19,.22,1.95,6.9,1.09,2.88,1515
1,13.68,1.83,2.36,17.2,104,2.42,2.69,.42,1.97,3.84,1.23,2.87,990
1,13.76,1.53,2.7,19.5,132,2.95,2.74,.5,1.35,5.4,1.25,3,1235
1,13.51,1.8,2.65,19,110,2.35,2.53,.29,1.54,4.2,1.1,2.87,1095
1,13.48,1.81,2.41,20.5,100,2.7,2.98,.26,1.86,5.1,1.04,3.47,920
1,13.28,1.64,2.84,15.5,110,2.6,2.68,.34,1.36,4.6,1.09,2.78,880
1,13.05,1.65,2.55,18,98,2.45,2.43,.29,1.44,4.25,1.12,2.51,1105
1,13.07,1.5,2.1,15.5,98,2.4,2.64,.28,1.37,3.7,1.18,2.69,1020
1,14.22,3.99,2.51,13.2,128,3,3.04,.2,2.08,5.1,.89,3.53,760
1,13.56,1.71,2.31,16.2,117,3.15,3.29,.34,2.34,6.13,.95,3.38,795
1,13.41,3.84,2.12,18.8,90,2.45,2.68,.27,1.48,4.28,.91,3,1035
1,13.88,1.89,2.59,15,101,3.25,3.56,.17,1.7,5.43,.88,3.56,1095
1,13.24,3.98,2.29,17.5,103,2.64,2.63,.32,1.66,4.36,.82,3,680
1,13.05,1.77,2.1,17,107,3,3,.28,2.03,5.04,.88,3.35,885
1,14.21,4.04,2.44,18.9,111,2.85,2.65,.3,1.25,5.24,.87,3.33,1080
1,14.38,3.59,2.28,16,102,3.25,3.17,.27,2.19,4.9,1.04,3.44,1065
1,13.9,1.68,2.12,16,101,3.1,3.39,.21,2.14,6.1,.91,3.33,985
1,14.1,2.02,2.4,18.8,103,2.75,2.92,.32,2.38,6.2,1.07,2.75,1060
1,13.94,1.73,2.27,17.4,108,2.88,3.54,.32,2.08,8.90,1.12,3.1,1260
1,13.05,1.73,2.04,12.4,92,2.72,3.27,.17,2.91,7.2,1.12,2.91,1150
1,13.83,1.65,2.6,17.2,94,2.45,2.99,.22,2.29,5.6,1.24,3.37,1265
1,13.82,1.75,2.42,14,111,3.88,3.74,.32,1.87,7.05,1.01,3.26,1190
1,13.77,1.9,2.68,17.1,115,3,2.79,.39,1.68,6.3,1.13,2.93,1375
1,13.74,1.67,2.25,16.4,118,2.6,2.9,.21,1.62,5.85,.92,3.2,1060
1,13.56,1.73,2.46,20.5,116,2.96,2.78,.2,2.45,6.25,.98,3.03,1120
1,14.22,1.7,2.3,16.3,118,3.2,3,.26,2.03,6.38,.94,3.31,970
1,13.29,1.97,2.68,16.8,102,3,3.23,.31,1.66,6,1.07,2.84,1270
1,13.72,1.43,2.5,16.7,108,3.4,3.67,.19,2.04,6.8,.89,2.87,1285
2,12.37,.94,1.36,10.6,88,1.98,.57,.28,.42,1.95,1.05,1.82,520
2,12.33,1.1,2.28,16,101,2.05,1.09,.63,.41,3.27,1.25,1.67,680
2,12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450
2,13.67,1.25,1.92,18,94,2.1,1.79,.32,.73,3.8,1.23,2.46,630
2,12.37,1.13,2.16,19,87,3.5,3.1,.19,1.87,4.45,1.22,2.87,420
2,12.17,1.45,2.53,19,104,1.89,1.75,.45,1.03,2.95,1.45,2.23,355
2,12.37,1.21,2.56,18.1,98,2.42,2.65,.37,2.08,4.6,1.19,2.3,678
2,13.11,1.01,1.7,15,78,2.98,3.18,.26,2.28,5.3,1.12,3.18,502
2,12.37,1.17,1.92,19.6,78,2.11,2,.27,1.04,4.68,1.12,3.48,510
2,13.34,.94,2.36,17,110,2.53,1.3,.55,.42,3.17,1.02,1.93,750
2,12.21,1.19,1.75,16.8,151,1.85,1.28,.14,2.5,2.85,1.28,3.07,718
2,12.29,1.61,2.21,20.4,103,1.1,1.02,.37,1.46,3.05,.906,1.82,870
2,13.86,1.51,2.67,25,86,2.95,2.86,.21,1.87,3.38,1.36,3.16,410
2,13.49,1.66,2.24,24,87,1.88,1.84,.27,1.03,3.74,.98,2.78,472
2,12.99,1.67,2.6,30,139,3.3,2.89,.21,1.96,3.35,1.31,3.5,985
2,11.96,1.09,2.3,21,101,3.38,2.14,.13,1.65,3.21,.99,3.13,886
2,11.66,1.88,1.92,16,97,1.61,1.57,.34,1.15,3.8,1.23,2.14,428
2,13.03,.9,1.71,16,86,1.95,2.03,.24,1.46,4.6,1.19,2.48,392
2,11.84,2.89,2.23,18,112,1.72,1.32,.43,.95,2.65,.96,2.52,500
2,12.33,.99,1.95,14.8,136,1.9,1.85,.35,2.76,3.4,1.06,2.31,750
2,12.7,3.87,2.4,23,101,2.83,2.55,.43,1.95,2.57,1.19,3.13,463
2,12,.92,2,19,86,2.42,2.26,.3,1.43,2.5,1.38,3.12,278
2,12.72,1.81,2.2,18.8,86,2.2,2.53,.26,1.77,3.9,1.16,3.14,714
2,12.08,1.13,2.51,24,78,2,1.58,.4,1.4,2.2,1.31,2.72,630
2,13.05,3.86,2.32,22.5,85,1.65,1.59,.61,1.62,4.8,.84,2.01,515
2,11.84,.89,2.58,18,94,2.2,2.21,.22,2.35,3.05,.79,3.08,520
2,12.67,.98,2.24,18,99,2.2,1.94,.3,1.46,2.62,1.23,3.16,450
2,12.16,1.61,2.31,22.8,90,1.78,1.69,.43,1.56,2.45,1.33,2.26,495
2,11.65,1.67,2.62,26,88,1.92,1.61,.4,1.34,2.6,1.36,3.21,562
2,11.64,2.06,2.46,21.6,84,1.95,1.69,.48,1.35,2.8,1,2.75,680
2,12.08,1.33,2.3,23.6,70,2.2,1.59,.42,1.38,1.74,1.07,3.21,625
2,12.08,1.83,2.32,18.5,81,1.6,1.5,.52,1.64,2.4,1.08,2.27,480
2,12,1.51,2.42,22,86,1.45,1.25,.5,1.63,3.6,1.05,2.65,450
2,12.69,1.53,2.26,20.7,80,1.38,1.46,.58,1.62,3.05,.96,2.06,495
2,12.29,2.83,2.22,18,88,2.45,2.25,.25,1.99,2.15,1.15,3.3,290
2,11.62,1.99,2.28,18,98,3.02,2.26,.17,1.35,3.25,1.16,2.96,345
2,12.47,1.52,2.2,19,162,2.5,2.27,.32,3.28,2.6,1.16,2.63,937
2,11.81,2.12,2.74,21.5,134,1.6,.99,.14,1.56,2.5,.95,2.26,625
2,12.29,1.41,1.98,16,85,2.55,2.5,.29,1.77,2.9,1.23,2.74,428
2,12.37,1.07,2.1,18.5,88,3.52,3.75,.24,1.95,4.5,1.04,2.77,660
2,12.29,3.17,2.21,18,88,2.85,2.99,.45,2.81,2.3,1.42,2.83,406
2,12.08,2.08,1.7,17.5,97,2.23,2.17,.26,1.4,3.3,1.27,2.96,710
2,12.6,1.34,1.9,18.5,88,1.45,1.36,.29,1.35,2.45,1.04,2.77,562
2,12.34,2.45,2.46,21,98,2.56,2.11,.34,1.31,2.8,.8,3.38,438
2,11.82,1.72,1.88,19.5,86,2.5,1.64,.37,1.42,2.06,.94,2.44,415
2,12.51,1.73,1.98,20.5,85,2.2,1.92,.32,1.48,2.94,1.04,3.57,672
2,12.42,2.55,2.27,22,90,1.68,1.84,.66,1.42,2.7,.86,3.3,315
2,12.25,1.73,2.12,19,80,1.65,2.03,.37,1.63,3.4,1,3.17,510
2,12.72,1.75,2.28,22.5,84,1.38,1.76,.48,1.63,3.3,.88,2.42,488
2,12.22,1.29,1.94,19,92,2.36,2.04,.39,2.08,2.7,.86,3.02,312
2,11.61,1.35,2.7,20,94,2.74,2.92,.29,2.49,2.65,.96,3.26,680
2,11.46,3.74,1.82,19.5,107,3.18,2.58,.24,3.58,2.9,.75,2.81,562
2,12.52,2.43,2.17,21,88,2.55,2.27,.26,1.22,2,.9,2.78,325
2,11.76,2.68,2.92,20,103,1.75,2.03,.6,1.05,3.8,1.23,2.5,607
2,11.41,.74,2.5,21,88,2.48,2.01,.42,1.44,3.08,1.1,2.31,434
2,12.08,1.39,2.5,22.5,84,2.56,2.29,.43,1.04,2.9,.93,3.19,385
2,11.03,1.51,2.2,21.5,85,2.46,2.17,.52,2.01,1.9,1.71,2.87,407
2,11.82,1.47,1.99,20.8,86,1.98,1.6,.3,1.53,1.95,.95,3.33,495
2,12.42,1.61,2.19,22.5,108,2,2.09,.34,1.61,2.06,1.06,2.96,345
2,12.77,3.43,1.98,16,80,1.63,1.25,.43,.83,3.4,.7,2.12,372
2,12,3.43,2,19,87,2,1.64,.37,1.87,1.28,.93,3.05,564
2,11.45,2.4,2.42,20,96,2.9,2.79,.32,1.83,3.25,.8,3.39,625
2,11.56,2.05,3.23,28.5,119,3.18,5.08,.47,1.87,6,.93,3.69,465
2,12.42,4.43,2.73,26.5,102,2.2,2.13,.43,1.71,2.08,.92,3.12,365
2,13.05,5.8,2.13,21.5,86,2.62,2.65,.3,2.01,2.6,.73,3.1,380
2,11.87,4.31,2.39,21,82,2.86,3.03,.21,2.91,2.8,.75,3.64,380
2,12.07,2.16,2.17,21,85,2.6,2.65,.37,1.35,2.76,.86,3.28,378
2,12.43,1.53,2.29,21.5,86,2.74,3.15,.39,1.77,3.94,.69,2.84,352
2,11.79,2.13,2.78,28.5,92,2.13,2.24,.58,1.76,3,.97,2.44,466
2,12.37,1.63,2.3,24.5,88,2.22,2.45,.4,1.9,2.12,.89,2.78,342
2,12.04,4.3,2.38,22,80,2.1,1.75,.42,1.35,2.6,.79,2.57,580
3,12.86,1.35,2.32,18,122,1.51,1.25,.21,.94,4.1,.76,1.29,630
3,12.88,2.99,2.4,20,104,1.3,1.22,.24,.83,5.4,.74,1.42,530
3,12.81,2.31,2.4,24,98,1.15,1.09,.27,.83,5.7,.66,1.36,560
3,12.7,3.55,2.36,21.5,106,1.7,1.2,.17,.84,5,.78,1.29,600
3,12.51,1.24,2.25,17.5,85,2,.58,.6,1.25,5.45,.75,1.51,650
3,12.6,2.46,2.2,18.5,94,1.62,.66,.63,.94,7.1,.73,1.58,695
3,12.25,4.72,2.54,21,89,1.38,.47,.53,.8,3.85,.75,1.27,720
3,12.53,5.51,2.64,25,96,1.79,.6,.63,1.1,5,.82,1.69,515
3,13.49,3.59,2.19,19.5,88,1.62,.48,.58,.88,5.7,.81,1.82,580
3,12.84,2.96,2.61,24,101,2.32,.6,.53,.81,4.92,.89,2.15,590
3,12.93,2.81,2.7,21,96,1.54,.5,.53,.75,4.6,.77,2.31,600
3,13.36,2.56,2.35,20,89,1.4,.5,.37,.64,5.6,.7,2.47,780
3,13.52,3.17,2.72,23.5,97,1.55,.52,.5,.55,4.35,.89,2.06,520
3,13.62,4.95,2.35,20,92,2,.8,.47,1.02,4.4,.91,2.05,550
3,12.25,3.88,2.2,18.5,112,1.38,.78,.29,1.14,8.21,.65,2,855
3,13.16,3.57,2.15,21,102,1.5,.55,.43,1.3,4,.6,1.68,830
3,13.88,5.04,2.23,20,80,.98,.34,.4,.68,4.9,.58,1.33,415
3,12.87,4.61,2.48,21.5,86,1.7,.65,.47,.86,7.65,.54,1.86,625
3,13.32,3.24,2.38,21.5,92,1.93,.76,.45,1.25,8.42,.55,1.62,650
3,13.08,3.9,2.36,21.5,113,1.41,1.39,.34,1.14,9.40,.57,1.33,550
3,13.5,3.12,2.62,24,123,1.4,1.57,.22,1.25,8.60,.59,1.3,500
3,12.79,2.67,2.48,22,112,1.48,1.36,.24,1.26,10.8,.48,1.47,480
3,13.11,1.9,2.75,25.5,116,2.2,1.28,.26,1.56,7.1,.61,1.33,425
3,13.23,3.3,2.28,18.5,98,1.8,.83,.61,1.87,10.52,.56,1.51,675
3,12.58,1.29,2.1,20,103,1.48,.58,.53,1.4,7.6,.58,1.55,640
3,13.17,5.19,2.32,22,93,1.74,.63,.61,1.55,7.9,.6,1.48,725
3,13.84,4.12,2.38,19.5,89,1.8,.83,.48,1.56,9.01,.57,1.64,480
3,12.45,3.03,2.64,27,97,1.9,.58,.63,1.14,7.5,.67,1.73,880
3,14.34,1.68,2.7,25,98,2.8,1.31,.53,2.7,13,.57,1.96,660
3,13.48,1.67,2.64,22.5,89,2.6,1.1,.52,2.29,11.75,.57,1.78,620
3,12.36,3.83,2.38,21,88,2.3,.92,.5,1.04,7.65,.56,1.58,520
3,13.69,3.26,2.54,20,107,1.83,.56,.5,.8,5.88,.96,1.82,680
3,12.85,3.27,2.58,22,106,1.65,.6,.6,.96,5.58,.87,2.11,570
3,12.96,3.45,2.35,18.5,106,1.39,.7,.4,.94,5.28,.68,1.75,675
3,13.78,2.76,2.3,22,90,1.35,.68,.41,1.03,9.58,.7,1.68,615
3,13.73,4.36,2.26,22.5,88,1.28,.47,.52,1.15,6.62,.78,1.75,520
3,13.45,3.7,2.6,23,111,1.7,.92,.43,1.46,10.68,.85,1.56,695
3,12.82,3.37,2.3,19.5,88,1.48,.66,.4,.97,10.26,.72,1.75,685
3,13.58,2.58,2.69,24.5,105,1.55,.84,.39,1.54,8.66,.74,1.8,750
3,13.4,4.6,2.86,25,112,1.98,.96,.27,1.11,8.5,.67,1.92,630
3,12.2,3.03,2.32,19,96,1.25,.49,.4,.73,5.5,.66,1.83,510
3,12.77,2.39,2.28,19.5,86,1.39,.51,.48,.64,9.899999,.57,1.63,470
3,14.16,2.51,2.48,20,91,1.68,.7,.44,1.24,9.7,.62,1.71,660
3,13.71,5.65,2.45,20.5,95,1.68,.61,.52,1.06,7.7,.64,1.74,740
3,13.4,3.91,2.48,23,102,1.8,.75,.43,1.41,7.3,.7,1.56,750
3,13.27,4.28,2.26,20,120,1.59,.69,.43,1.35,10.2,.59,1.56,835
3,13.17,2.59,2.37,20,120,1.65,.68,.53,1.46,9.3,.6,1.62,840
3,14.13,4.1,2.74,24.5,96,2.05,.76,.56,1.35,9.2,.61,1.6,560
1 Wine Alcohol Malic.acid Ash Acl Mg Phenols Flavanoids Nonflavanoid.phenols Proanth Color.int Hue OD Proline
2 1 14.23 1.71 2.43 15.6 127 2.8 3.06 .28 2.29 5.64 1.04 3.92 1065
3 1 13.2 1.78 2.14 11.2 100 2.65 2.76 .26 1.28 4.38 1.05 3.4 1050
4 1 13.16 2.36 2.67 18.6 101 2.8 3.24 .3 2.81 5.68 1.03 3.17 1185
5 1 14.37 1.95 2.5 16.8 113 3.85 3.49 .24 2.18 7.8 .86 3.45 1480
6 1 13.24 2.59 2.87 21 118 2.8 2.69 .39 1.82 4.32 1.04 2.93 735
7 1 14.2 1.76 2.45 15.2 112 3.27 3.39 .34 1.97 6.75 1.05 2.85 1450
8 1 14.39 1.87 2.45 14.6 96 2.5 2.52 .3 1.98 5.25 1.02 3.58 1290
9 1 14.06 2.15 2.61 17.6 121 2.6 2.51 .31 1.25 5.05 1.06 3.58 1295
10 1 14.83 1.64 2.17 14 97 2.8 2.98 .29 1.98 5.2 1.08 2.85 1045
11 1 13.86 1.35 2.27 16 98 2.98 3.15 .22 1.85 7.22 1.01 3.55 1045
12 1 14.1 2.16 2.3 18 105 2.95 3.32 .22 2.38 5.75 1.25 3.17 1510
13 1 14.12 1.48 2.32 16.8 95 2.2 2.43 .26 1.57 5 1.17 2.82 1280
14 1 13.75 1.73 2.41 16 89 2.6 2.76 .29 1.81 5.6 1.15 2.9 1320
15 1 14.75 1.73 2.39 11.4 91 3.1 3.69 .43 2.81 5.4 1.25 2.73 1150
16 1 14.38 1.87 2.38 12 102 3.3 3.64 .29 2.96 7.5 1.2 3 1547
17 1 13.63 1.81 2.7 17.2 112 2.85 2.91 .3 1.46 7.3 1.28 2.88 1310
18 1 14.3 1.92 2.72 20 120 2.8 3.14 .33 1.97 6.2 1.07 2.65 1280
19 1 13.83 1.57 2.62 20 115 2.95 3.4 .4 1.72 6.6 1.13 2.57 1130
20 1 14.19 1.59 2.48 16.5 108 3.3 3.93 .32 1.86 8.7 1.23 2.82 1680
21 1 13.64 3.1 2.56 15.2 116 2.7 3.03 .17 1.66 5.1 .96 3.36 845
22 1 14.06 1.63 2.28 16 126 3 3.17 .24 2.1 5.65 1.09 3.71 780
23 1 12.93 3.8 2.65 18.6 102 2.41 2.41 .25 1.98 4.5 1.03 3.52 770
24 1 13.71 1.86 2.36 16.6 101 2.61 2.88 .27 1.69 3.8 1.11 4 1035
25 1 12.85 1.6 2.52 17.8 95 2.48 2.37 .26 1.46 3.93 1.09 3.63 1015
26 1 13.5 1.81 2.61 20 96 2.53 2.61 .28 1.66 3.52 1.12 3.82 845
27 1 13.05 2.05 3.22 25 124 2.63 2.68 .47 1.92 3.58 1.13 3.2 830
28 1 13.39 1.77 2.62 16.1 93 2.85 2.94 .34 1.45 4.8 .92 3.22 1195
29 1 13.3 1.72 2.14 17 94 2.4 2.19 .27 1.35 3.95 1.02 2.77 1285
30 1 13.87 1.9 2.8 19.4 107 2.95 2.97 .37 1.76 4.5 1.25 3.4 915
31 1 14.02 1.68 2.21 16 96 2.65 2.33 .26 1.98 4.7 1.04 3.59 1035
32 1 13.73 1.5 2.7 22.5 101 3 3.25 .29 2.38 5.7 1.19 2.71 1285
33 1 13.58 1.66 2.36 19.1 106 2.86 3.19 .22 1.95 6.9 1.09 2.88 1515
34 1 13.68 1.83 2.36 17.2 104 2.42 2.69 .42 1.97 3.84 1.23 2.87 990
35 1 13.76 1.53 2.7 19.5 132 2.95 2.74 .5 1.35 5.4 1.25 3 1235
36 1 13.51 1.8 2.65 19 110 2.35 2.53 .29 1.54 4.2 1.1 2.87 1095
37 1 13.48 1.81 2.41 20.5 100 2.7 2.98 .26 1.86 5.1 1.04 3.47 920
38 1 13.28 1.64 2.84 15.5 110 2.6 2.68 .34 1.36 4.6 1.09 2.78 880
39 1 13.05 1.65 2.55 18 98 2.45 2.43 .29 1.44 4.25 1.12 2.51 1105
40 1 13.07 1.5 2.1 15.5 98 2.4 2.64 .28 1.37 3.7 1.18 2.69 1020
41 1 14.22 3.99 2.51 13.2 128 3 3.04 .2 2.08 5.1 .89 3.53 760
42 1 13.56 1.71 2.31 16.2 117 3.15 3.29 .34 2.34 6.13 .95 3.38 795
43 1 13.41 3.84 2.12 18.8 90 2.45 2.68 .27 1.48 4.28 .91 3 1035
44 1 13.88 1.89 2.59 15 101 3.25 3.56 .17 1.7 5.43 .88 3.56 1095
45 1 13.24 3.98 2.29 17.5 103 2.64 2.63 .32 1.66 4.36 .82 3 680
46 1 13.05 1.77 2.1 17 107 3 3 .28 2.03 5.04 .88 3.35 885
47 1 14.21 4.04 2.44 18.9 111 2.85 2.65 .3 1.25 5.24 .87 3.33 1080
48 1 14.38 3.59 2.28 16 102 3.25 3.17 .27 2.19 4.9 1.04 3.44 1065
49 1 13.9 1.68 2.12 16 101 3.1 3.39 .21 2.14 6.1 .91 3.33 985
50 1 14.1 2.02 2.4 18.8 103 2.75 2.92 .32 2.38 6.2 1.07 2.75 1060
51 1 13.94 1.73 2.27 17.4 108 2.88 3.54 .32 2.08 8.90 1.12 3.1 1260
52 1 13.05 1.73 2.04 12.4 92 2.72 3.27 .17 2.91 7.2 1.12 2.91 1150
53 1 13.83 1.65 2.6 17.2 94 2.45 2.99 .22 2.29 5.6 1.24 3.37 1265
54 1 13.82 1.75 2.42 14 111 3.88 3.74 .32 1.87 7.05 1.01 3.26 1190
55 1 13.77 1.9 2.68 17.1 115 3 2.79 .39 1.68 6.3 1.13 2.93 1375
56 1 13.74 1.67 2.25 16.4 118 2.6 2.9 .21 1.62 5.85 .92 3.2 1060
57 1 13.56 1.73 2.46 20.5 116 2.96 2.78 .2 2.45 6.25 .98 3.03 1120
58 1 14.22 1.7 2.3 16.3 118 3.2 3 .26 2.03 6.38 .94 3.31 970
59 1 13.29 1.97 2.68 16.8 102 3 3.23 .31 1.66 6 1.07 2.84 1270
60 1 13.72 1.43 2.5 16.7 108 3.4 3.67 .19 2.04 6.8 .89 2.87 1285
61 2 12.37 .94 1.36 10.6 88 1.98 .57 .28 .42 1.95 1.05 1.82 520
62 2 12.33 1.1 2.28 16 101 2.05 1.09 .63 .41 3.27 1.25 1.67 680
63 2 12.64 1.36 2.02 16.8 100 2.02 1.41 .53 .62 5.75 .98 1.59 450
64 2 13.67 1.25 1.92 18 94 2.1 1.79 .32 .73 3.8 1.23 2.46 630
65 2 12.37 1.13 2.16 19 87 3.5 3.1 .19 1.87 4.45 1.22 2.87 420
66 2 12.17 1.45 2.53 19 104 1.89 1.75 .45 1.03 2.95 1.45 2.23 355
67 2 12.37 1.21 2.56 18.1 98 2.42 2.65 .37 2.08 4.6 1.19 2.3 678
68 2 13.11 1.01 1.7 15 78 2.98 3.18 .26 2.28 5.3 1.12 3.18 502
69 2 12.37 1.17 1.92 19.6 78 2.11 2 .27 1.04 4.68 1.12 3.48 510
70 2 13.34 .94 2.36 17 110 2.53 1.3 .55 .42 3.17 1.02 1.93 750
71 2 12.21 1.19 1.75 16.8 151 1.85 1.28 .14 2.5 2.85 1.28 3.07 718
72 2 12.29 1.61 2.21 20.4 103 1.1 1.02 .37 1.46 3.05 .906 1.82 870
73 2 13.86 1.51 2.67 25 86 2.95 2.86 .21 1.87 3.38 1.36 3.16 410
74 2 13.49 1.66 2.24 24 87 1.88 1.84 .27 1.03 3.74 .98 2.78 472
75 2 12.99 1.67 2.6 30 139 3.3 2.89 .21 1.96 3.35 1.31 3.5 985
76 2 11.96 1.09 2.3 21 101 3.38 2.14 .13 1.65 3.21 .99 3.13 886
77 2 11.66 1.88 1.92 16 97 1.61 1.57 .34 1.15 3.8 1.23 2.14 428
78 2 13.03 .9 1.71 16 86 1.95 2.03 .24 1.46 4.6 1.19 2.48 392
79 2 11.84 2.89 2.23 18 112 1.72 1.32 .43 .95 2.65 .96 2.52 500
80 2 12.33 .99 1.95 14.8 136 1.9 1.85 .35 2.76 3.4 1.06 2.31 750
81 2 12.7 3.87 2.4 23 101 2.83 2.55 .43 1.95 2.57 1.19 3.13 463
82 2 12 .92 2 19 86 2.42 2.26 .3 1.43 2.5 1.38 3.12 278
83 2 12.72 1.81 2.2 18.8 86 2.2 2.53 .26 1.77 3.9 1.16 3.14 714
84 2 12.08 1.13 2.51 24 78 2 1.58 .4 1.4 2.2 1.31 2.72 630
85 2 13.05 3.86 2.32 22.5 85 1.65 1.59 .61 1.62 4.8 .84 2.01 515
86 2 11.84 .89 2.58 18 94 2.2 2.21 .22 2.35 3.05 .79 3.08 520
87 2 12.67 .98 2.24 18 99 2.2 1.94 .3 1.46 2.62 1.23 3.16 450
88 2 12.16 1.61 2.31 22.8 90 1.78 1.69 .43 1.56 2.45 1.33 2.26 495
89 2 11.65 1.67 2.62 26 88 1.92 1.61 .4 1.34 2.6 1.36 3.21 562
90 2 11.64 2.06 2.46 21.6 84 1.95 1.69 .48 1.35 2.8 1 2.75 680
91 2 12.08 1.33 2.3 23.6 70 2.2 1.59 .42 1.38 1.74 1.07 3.21 625
92 2 12.08 1.83 2.32 18.5 81 1.6 1.5 .52 1.64 2.4 1.08 2.27 480
93 2 12 1.51 2.42 22 86 1.45 1.25 .5 1.63 3.6 1.05 2.65 450
94 2 12.69 1.53 2.26 20.7 80 1.38 1.46 .58 1.62 3.05 .96 2.06 495
95 2 12.29 2.83 2.22 18 88 2.45 2.25 .25 1.99 2.15 1.15 3.3 290
96 2 11.62 1.99 2.28 18 98 3.02 2.26 .17 1.35 3.25 1.16 2.96 345
97 2 12.47 1.52 2.2 19 162 2.5 2.27 .32 3.28 2.6 1.16 2.63 937
98 2 11.81 2.12 2.74 21.5 134 1.6 .99 .14 1.56 2.5 .95 2.26 625
99 2 12.29 1.41 1.98 16 85 2.55 2.5 .29 1.77 2.9 1.23 2.74 428
100 2 12.37 1.07 2.1 18.5 88 3.52 3.75 .24 1.95 4.5 1.04 2.77 660
101 2 12.29 3.17 2.21 18 88 2.85 2.99 .45 2.81 2.3 1.42 2.83 406
102 2 12.08 2.08 1.7 17.5 97 2.23 2.17 .26 1.4 3.3 1.27 2.96 710
103 2 12.6 1.34 1.9 18.5 88 1.45 1.36 .29 1.35 2.45 1.04 2.77 562
104 2 12.34 2.45 2.46 21 98 2.56 2.11 .34 1.31 2.8 .8 3.38 438
105 2 11.82 1.72 1.88 19.5 86 2.5 1.64 .37 1.42 2.06 .94 2.44 415
106 2 12.51 1.73 1.98 20.5 85 2.2 1.92 .32 1.48 2.94 1.04 3.57 672
107 2 12.42 2.55 2.27 22 90 1.68 1.84 .66 1.42 2.7 .86 3.3 315
108 2 12.25 1.73 2.12 19 80 1.65 2.03 .37 1.63 3.4 1 3.17 510
109 2 12.72 1.75 2.28 22.5 84 1.38 1.76 .48 1.63 3.3 .88 2.42 488
110 2 12.22 1.29 1.94 19 92 2.36 2.04 .39 2.08 2.7 .86 3.02 312
111 2 11.61 1.35 2.7 20 94 2.74 2.92 .29 2.49 2.65 .96 3.26 680
112 2 11.46 3.74 1.82 19.5 107 3.18 2.58 .24 3.58 2.9 .75 2.81 562
113 2 12.52 2.43 2.17 21 88 2.55 2.27 .26 1.22 2 .9 2.78 325
114 2 11.76 2.68 2.92 20 103 1.75 2.03 .6 1.05 3.8 1.23 2.5 607
115 2 11.41 .74 2.5 21 88 2.48 2.01 .42 1.44 3.08 1.1 2.31 434
116 2 12.08 1.39 2.5 22.5 84 2.56 2.29 .43 1.04 2.9 .93 3.19 385
117 2 11.03 1.51 2.2 21.5 85 2.46 2.17 .52 2.01 1.9 1.71 2.87 407
118 2 11.82 1.47 1.99 20.8 86 1.98 1.6 .3 1.53 1.95 .95 3.33 495
119 2 12.42 1.61 2.19 22.5 108 2 2.09 .34 1.61 2.06 1.06 2.96 345
120 2 12.77 3.43 1.98 16 80 1.63 1.25 .43 .83 3.4 .7 2.12 372
121 2 12 3.43 2 19 87 2 1.64 .37 1.87 1.28 .93 3.05 564
122 2 11.45 2.4 2.42 20 96 2.9 2.79 .32 1.83 3.25 .8 3.39 625
123 2 11.56 2.05 3.23 28.5 119 3.18 5.08 .47 1.87 6 .93 3.69 465
124 2 12.42 4.43 2.73 26.5 102 2.2 2.13 .43 1.71 2.08 .92 3.12 365
125 2 13.05 5.8 2.13 21.5 86 2.62 2.65 .3 2.01 2.6 .73 3.1 380
126 2 11.87 4.31 2.39 21 82 2.86 3.03 .21 2.91 2.8 .75 3.64 380
127 2 12.07 2.16 2.17 21 85 2.6 2.65 .37 1.35 2.76 .86 3.28 378
128 2 12.43 1.53 2.29 21.5 86 2.74 3.15 .39 1.77 3.94 .69 2.84 352
129 2 11.79 2.13 2.78 28.5 92 2.13 2.24 .58 1.76 3 .97 2.44 466
130 2 12.37 1.63 2.3 24.5 88 2.22 2.45 .4 1.9 2.12 .89 2.78 342
131 2 12.04 4.3 2.38 22 80 2.1 1.75 .42 1.35 2.6 .79 2.57 580
132 3 12.86 1.35 2.32 18 122 1.51 1.25 .21 .94 4.1 .76 1.29 630
133 3 12.88 2.99 2.4 20 104 1.3 1.22 .24 .83 5.4 .74 1.42 530
134 3 12.81 2.31 2.4 24 98 1.15 1.09 .27 .83 5.7 .66 1.36 560
135 3 12.7 3.55 2.36 21.5 106 1.7 1.2 .17 .84 5 .78 1.29 600
136 3 12.51 1.24 2.25 17.5 85 2 .58 .6 1.25 5.45 .75 1.51 650
137 3 12.6 2.46 2.2 18.5 94 1.62 .66 .63 .94 7.1 .73 1.58 695
138 3 12.25 4.72 2.54 21 89 1.38 .47 .53 .8 3.85 .75 1.27 720
139 3 12.53 5.51 2.64 25 96 1.79 .6 .63 1.1 5 .82 1.69 515
140 3 13.49 3.59 2.19 19.5 88 1.62 .48 .58 .88 5.7 .81 1.82 580
141 3 12.84 2.96 2.61 24 101 2.32 .6 .53 .81 4.92 .89 2.15 590
142 3 12.93 2.81 2.7 21 96 1.54 .5 .53 .75 4.6 .77 2.31 600
143 3 13.36 2.56 2.35 20 89 1.4 .5 .37 .64 5.6 .7 2.47 780
144 3 13.52 3.17 2.72 23.5 97 1.55 .52 .5 .55 4.35 .89 2.06 520
145 3 13.62 4.95 2.35 20 92 2 .8 .47 1.02 4.4 .91 2.05 550
146 3 12.25 3.88 2.2 18.5 112 1.38 .78 .29 1.14 8.21 .65 2 855
147 3 13.16 3.57 2.15 21 102 1.5 .55 .43 1.3 4 .6 1.68 830
148 3 13.88 5.04 2.23 20 80 .98 .34 .4 .68 4.9 .58 1.33 415
149 3 12.87 4.61 2.48 21.5 86 1.7 .65 .47 .86 7.65 .54 1.86 625
150 3 13.32 3.24 2.38 21.5 92 1.93 .76 .45 1.25 8.42 .55 1.62 650
151 3 13.08 3.9 2.36 21.5 113 1.41 1.39 .34 1.14 9.40 .57 1.33 550
152 3 13.5 3.12 2.62 24 123 1.4 1.57 .22 1.25 8.60 .59 1.3 500
153 3 12.79 2.67 2.48 22 112 1.48 1.36 .24 1.26 10.8 .48 1.47 480
154 3 13.11 1.9 2.75 25.5 116 2.2 1.28 .26 1.56 7.1 .61 1.33 425
155 3 13.23 3.3 2.28 18.5 98 1.8 .83 .61 1.87 10.52 .56 1.51 675
156 3 12.58 1.29 2.1 20 103 1.48 .58 .53 1.4 7.6 .58 1.55 640
157 3 13.17 5.19 2.32 22 93 1.74 .63 .61 1.55 7.9 .6 1.48 725
158 3 13.84 4.12 2.38 19.5 89 1.8 .83 .48 1.56 9.01 .57 1.64 480
159 3 12.45 3.03 2.64 27 97 1.9 .58 .63 1.14 7.5 .67 1.73 880
160 3 14.34 1.68 2.7 25 98 2.8 1.31 .53 2.7 13 .57 1.96 660
161 3 13.48 1.67 2.64 22.5 89 2.6 1.1 .52 2.29 11.75 .57 1.78 620
162 3 12.36 3.83 2.38 21 88 2.3 .92 .5 1.04 7.65 .56 1.58 520
163 3 13.69 3.26 2.54 20 107 1.83 .56 .5 .8 5.88 .96 1.82 680
164 3 12.85 3.27 2.58 22 106 1.65 .6 .6 .96 5.58 .87 2.11 570
165 3 12.96 3.45 2.35 18.5 106 1.39 .7 .4 .94 5.28 .68 1.75 675
166 3 13.78 2.76 2.3 22 90 1.35 .68 .41 1.03 9.58 .7 1.68 615
167 3 13.73 4.36 2.26 22.5 88 1.28 .47 .52 1.15 6.62 .78 1.75 520
168 3 13.45 3.7 2.6 23 111 1.7 .92 .43 1.46 10.68 .85 1.56 695
169 3 12.82 3.37 2.3 19.5 88 1.48 .66 .4 .97 10.26 .72 1.75 685
170 3 13.58 2.58 2.69 24.5 105 1.55 .84 .39 1.54 8.66 .74 1.8 750
171 3 13.4 4.6 2.86 25 112 1.98 .96 .27 1.11 8.5 .67 1.92 630
172 3 12.2 3.03 2.32 19 96 1.25 .49 .4 .73 5.5 .66 1.83 510
173 3 12.77 2.39 2.28 19.5 86 1.39 .51 .48 .64 9.899999 .57 1.63 470
174 3 14.16 2.51 2.48 20 91 1.68 .7 .44 1.24 9.7 .62 1.71 660
175 3 13.71 5.65 2.45 20.5 95 1.68 .61 .52 1.06 7.7 .64 1.74 740
176 3 13.4 3.91 2.48 23 102 1.8 .75 .43 1.41 7.3 .7 1.56 750
177 3 13.27 4.28 2.26 20 120 1.59 .69 .43 1.35 10.2 .59 1.56 835
178 3 13.17 2.59 2.37 20 120 1.65 .68 .53 1.46 9.3 .6 1.62 840
179 3 14.13 4.1 2.74 24.5 96 2.05 .76 .56 1.35 9.2 .61 1.6 560

Binary file not shown.

After

Width:  |  Height:  |  Size: 134 KiB

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 364 KiB

File diff suppressed because one or more lines are too long