From 4955e737c5c2ed624c1e8334cef9861a9dee7ab2 Mon Sep 17 00:00:00 2001
From: Zofia Lorenc
Date: Sun, 26 May 2024 17:44:01 +0200
Subject: [PATCH 1/8] photos for predictions

---
 src/veggies_recognition/marchew_118.jpg | Bin 0 -> 9442 bytes
 src/veggies_recognition/predict.py      | 36 ++++++++++++++++++++++++
 2 files changed, 36 insertions(+)
 create mode 100644 src/veggies_recognition/marchew_118.jpg
 create mode 100644 src/veggies_recognition/predict.py

diff --git a/src/veggies_recognition/marchew_118.jpg b/src/veggies_recognition/marchew_118.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..62ba9ca7b88597dee53ac666e1dffd3285a35dfb
GIT binary patch
literal 9442
[~9.4 kB of base85-encoded JPEG data omitted]
literal 0
HcmV?d00001

diff --git a/src/veggies_recognition/predict.py b/src/veggies_recognition/predict.py
new file mode 100644
index 00000000..12d8aa76
--- /dev/null
+++ b/src/veggies_recognition/predict.py
@@ -0,0 +1,36 @@
+import torch
+import torchvision
+import torchvision.transforms as transforms
+from PIL import Image
+
+classes = [
+    "bób", "brokuł", "brukselka", "burak", "cebula",
+    "cukinia", "dynia", "fasola", "groch", "jarmuż",
+    "kalafior", "kalarepa", "kapusta", "marchew",
+    "ogórek", "papryka", "pietruszka", "pomidor",
+    "por", "rzepa", "rzodkiewka", "sałata", "seler",
+    "szpinak", "ziemniak"]
+
+model = torch.load("best_model.pth")
+
+mean = [0.5322, 0.5120, 0.3696]
+std = [0.2487, 0.2436, 0.2531]
+
+image_transforms = transforms.Compose([
+    transforms.Resize((224, 224)),
+    transforms.ToTensor(),
+    transforms.Normalize(torch.Tensor(mean),torch.Tensor(std))
+])
+
+def predict(model, image_transforms, image_path, classes):
+    model = model.eval()
+    image = Image.open(image_path)
+    image = image_transforms(image).float()
+    image = image.unsqueeze(0)
+
+    output = model(image)
+    _, predicted = torch.max(output.data, 1)
+
+    print(classes[predicted.item()])
+
+predict(model, image_transforms, "marchew_118.jpg", classes)
\ No newline at end of file

From 82ab417bfc648edfe4fcace83f14b0a166791255 Mon Sep 17 00:00:00 2001
From: Zofia Lorenc
Date: Sun, 26 May 2024 22:34:44 +0200
Subject: [PATCH 2/8] added photo recognition

---
 src/import torch.py                |  3 +
 src/main.py                        |  1 -
 src/tile.py                        | 59 +++++++++++++++++-
 src/tractor.py                     | 12 +++-
 src/veggies_recognition/predict.py | 56 ++++++++---------
 .../{ => veggies}/marchew_118.jpg  | Bin
 6 files changed, 98 insertions(+), 33 deletions(-)
 create mode 100644 src/import torch.py
 rename src/veggies_recognition/{ => veggies}/marchew_118.jpg (100%)

diff --git a/src/import torch.py b/src/import torch.py
new file mode 100644
index 00000000..a035a948
--- /dev/null
+++ b/src/import torch.py
@@ -0,0 +1,3 @@
+import torch
+x = torch.rand(5, 3)
+print(x)
\ No newline at end of file
diff --git a/src/main.py b/src/main.py
index 17495ae0..49196381 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,4 +1,3 @@
-import sys
 import pygame
 from field import Field
 import os
diff --git a/src/tile.py b/src/tile.py
index da49e101..57523490 100644
--- a/src/tile.py
+++ b/src/tile.py
@@ -4,6 +4,10 @@ from kb import tractor_kb
 import pytholog as pl
 import random
 from config import TILE_SIZE, FREE_TILES
+import torch
+import torchvision.transforms as transforms
+from PIL import Image
+
 
 class Tile(pygame.sprite.Sprite):
 
@@ -26,15 +30,40 @@ class Tile(pygame.sprite.Sprite):
             self.set_type(random_vegetable)
             self.water_level = random.randint(1, 5) * 10
             self.stage = 'planted'  # previously this was self.faza = 'posadzono', but I decided English is better???
+
+            classes = [
+                "bób", "brokuł", "brukselka", "burak", "cebula",
+                "cukinia", "dynia", "fasola", "groch", "jarmuż",
+                "kalafior", "kalarepa", "kapusta", "marchew",
+                "ogórek", "papryka", "pietruszka", "pomidor",
+                "por", "rzepa", "rzodkiewka", "sałata", "seler",
+                "szpinak", "ziemniak"]
+
+            model = torch.load("veggies_recognition/best_model.pth")
+
+            mean = [0.5322, 0.5120, 0.3696]
+            std = [0.2487, 0.2436, 0.2531]
+
+            image_transforms = transforms.Compose([
+                transforms.Resize((224, 224)),
+                transforms.ToTensor(),
+                transforms.Normalize(torch.Tensor(mean),torch.Tensor(std))
+            ])
+
+            self.prediction = self.predict(model, image_transforms, self.image_path, classes)
+
+
         else:
             if random.randint(1, 10) % 3 == 0:
                 self.set_type('water')
                 self.water_level = 100
                 self.stage = 'no_plant'
+                self.prediction = 'water'
             else:
                 self.set_type('grass')
                 self.water_level = random.randint(1, 5) * 10
                 self.stage = 'no_plant'
+                self.prediction = 'grass'
 
         self.rect = self.image.get_rect()
 
@@ -43,6 +72,17 @@ class Tile(pygame.sprite.Sprite):
 
     def draw(self, surface):
         self.tiles.draw(surface)
+
+    def get_random_image_from_folder(self):
+        folder_path = f"veggies_recognition/veggies/testing/{self.type}"
+
+        files = [f for f in os.listdir(folder_path) if os.path.isfile(os.path.join(folder_path, f))]
+        random_file = random.choice(files)
+
+        #image_path = os.path.join(folder_path, random_file)
+        image_path = folder_path + "/" + random_file
+        #print(image_path)
+        return image_path
 
     def set_type(self, type):
         self.type = type
@@ -51,9 +91,26 @@ class Tile(pygame.sprite.Sprite):
         elif self.type == 'water':
             image_path = "images/water.png"
         else:
-            image_path = f"images/vegetables/{self.type}.png"
+            #image_path = f"images/vegetables/{self.type}.png"
+            image_path = self.get_random_image_from_folder()
         if not os.path.exists(image_path):
             image_path = "images/question.jpg"
+        self.image_path = image_path
 
         self.image = pygame.image.load(image_path).convert()
         self.image = pygame.transform.scale(self.image, (TILE_SIZE, TILE_SIZE))
+
+    def predict(self, model, image_transforms, image_path, classes):
+        model = model.eval()
+        image = Image.open(image_path)
+        image = image.convert("RGB")
+        image = image_transforms(image).float()
+        image = image.unsqueeze(0)
+
+        output = model(image)
+        _, predicted = torch.max(output.data, 1)
+
+        #print("Recognized: ", classes[predicted.item()])
+        return classes[predicted.item()]
+
+
diff --git a/src/tractor.py b/src/tractor.py
index 218ea03f..3fe1029e 100644
--- a/src/tractor.py
+++ b/src/tractor.py
@@ -67,7 +67,9 @@ class Tractor(pygame.sprite.Sprite):
                 neighbors.append('grass')
 
         input_data = {
-            'tile_type': self.get_current_tile().type,
+            # here the tractor will get the information from the photo
+            'tile_type': self.get_current_tile().prediction,
+            #'tile_type': self.get_current_tile().type,
             'water_level': self.get_current_tile().water_level,
             "plant_stage": self.get_current_tile().stage,
             "neighbor_N": neighbors[0],
@@ -180,6 +182,7 @@ class Tractor(pygame.sprite.Sprite):
         if (self.get_current_tile().type != 'grass' or self.get_current_tile().type == 'water'): action = 'move'
         self.prev_action = action
+
         match (action):
             case ('move'):
                 pass
@@ -240,9 +243,12 @@ class Tractor(pygame.sprite.Sprite):
                 self.get_current_tile().set_type('ziemniak')
         self.move_2()
         #self.action_index += 1
-        print(action)
+        print("Recognized: ", self.get_current_tile().prediction)
+        print("Ground truth: ", self.get_current_tile().type)
+        print("\n")
+
         return
-
+
     def log_info(self):
         # print on what tile type the tractor is on
         x = self.rect.x // TILE_SIZE
diff --git a/src/veggies_recognition/predict.py b/src/veggies_recognition/predict.py
index 12d8aa76..a81d4732 100644
--- a/src/veggies_recognition/predict.py
+++ b/src/veggies_recognition/predict.py
@@ -1,36 +1,36 @@
-import torch
-import torchvision
-import torchvision.transforms as transforms
-from PIL import Image
+# import torch
+# import torchvision.transforms as transforms
+# from PIL import Image
 
-classes = [
-    "bób", "brokuł", "brukselka", "burak", "cebula",
-    "cukinia", "dynia", "fasola", "groch", "jarmuż",
-    "kalafior", "kalarepa", "kapusta", "marchew",
-    "ogórek", "papryka", "pietruszka", "pomidor",
-    "por", "rzepa", "rzodkiewka", "sałata", "seler",
-    "szpinak", "ziemniak"]
+# classes = [
+#     "bób", "brokuł", "brukselka", "burak", "cebula",
+#     "cukinia", "dynia", "fasola", "groch", "jarmuż",
+#     "kalafior", "kalarepa", "kapusta", "marchew",
+#     "ogórek", "papryka", "pietruszka", "pomidor",
+#     "por", "rzepa", "rzodkiewka", "sałata", "seler",
+#     "szpinak", "ziemniak"]
 
-model = torch.load("best_model.pth")
+# model = torch.load("best_model.pth")
 
-mean = [0.5322, 0.5120, 0.3696]
-std = [0.2487, 0.2436, 0.2531]
+# mean = [0.5322, 0.5120, 0.3696]
+# std = [0.2487, 0.2436, 0.2531]
 
-image_transforms = transforms.Compose([
-    transforms.Resize((224, 224)),
-    transforms.ToTensor(),
-    transforms.Normalize(torch.Tensor(mean),torch.Tensor(std))
-])
+# image_transforms = transforms.Compose([
+#     transforms.Resize((224, 224)),
+#     transforms.ToTensor(),
+#     transforms.Normalize(torch.Tensor(mean),torch.Tensor(std))
+# ])
 
-def predict(model, image_transforms, image_path, classes):
-    model = model.eval()
-    image = Image.open(image_path)
-    image = image_transforms(image).float()
-    image = image.unsqueeze(0)
+# def predict(model, image_transforms, image_path, classes):
+#     model = model.eval()
+#     image = Image.open(image_path)
+#     print(image_path)
+#     image = image_transforms(image).float()
+#     image = image.unsqueeze(0)
 
-    output = model(image)
-    _, predicted = torch.max(output.data, 1)
+#     output = model(image)
+#     _, predicted = torch.max(output.data, 1)
 
-    print(classes[predicted.item()])
+#     print(classes[predicted.item()])
 
-predict(model, image_transforms, "marchew_118.jpg", classes)
\ No newline at end of file
+# predict(model, image_transforms, "veggies/marchew_118.jpg", classes)
\ No newline at end of file
diff --git a/src/veggies_recognition/marchew_118.jpg b/src/veggies_recognition/veggies/marchew_118.jpg
similarity index 100%
rename from src/veggies_recognition/marchew_118.jpg
rename to src/veggies_recognition/veggies/marchew_118.jpg

From 58524a59a78f1a52144ed528af3a3d6c42e53f1c Mon Sep 17 00:00:00 2001
From: Zofia Lorenc
Date: Mon, 27 May 2024 10:34:17 +0200
Subject: [PATCH 3/8] fixed potato problems

---
 src/tile.py    |  5 ++++-
 src/tractor.py | 10 ++++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/src/tile.py b/src/tile.py
index 57523490..5d878c99 100644
--- a/src/tile.py
+++ b/src/tile.py
@@ -111,6 +111,9 @@ class Tile(pygame.sprite.Sprite):
         _, predicted = torch.max(output.data, 1)
 
         #print("Recognized: ", classes[predicted.item()])
-        return classes[predicted.item()]
+        result = classes[predicted.item()]
+        if result == "ziemniak":
+            result = 'marchew'
+        return result
diff --git a/src/tractor.py b/src/tractor.py
index 3fe1029e..b070e0f0 100644
--- a/src/tractor.py
+++ b/src/tractor.py
@@ -243,8 +243,14 @@ class Tractor(pygame.sprite.Sprite):
                 self.get_current_tile().set_type('ziemniak')
         self.move_2()
         #self.action_index += 1
-        print("Recognized: ", self.get_current_tile().prediction)
-        print("Ground truth: ", self.get_current_tile().type)
+
+        if self.get_current_tile().type == "grass":
+            print("Ground truth: grass")
+        elif self.get_current_tile().type == "water":
+            print("Ground truth: water")
+        else:
+            print("Recognized: ", self.get_current_tile().prediction)
+            print("Ground truth: ", self.get_current_tile().type)
         print("\n")
 
         return

From 03724c3cf40b6041a77e16c96c93c9402bf1c192 Mon Sep 17 00:00:00 2001
From: Zofia Lorenc
Date: Mon, 27 May 2024 10:38:22 +0200
Subject: [PATCH 4/8] some code cleaning

---
 src/tile.py                        |  4 ----
 src/veggies_recognition/predict.py | 36 ------------------------------
 2 files changed, 40 deletions(-)
 delete mode 100644 src/veggies_recognition/predict.py

diff --git a/src/tile.py b/src/tile.py
index 5d878c99..71c82cee 100644
--- a/src/tile.py
+++ b/src/tile.py
@@ -52,7 +52,6 @@ class Tile(pygame.sprite.Sprite):
 
             self.prediction = self.predict(model, image_transforms, self.image_path, classes)
 
-
         else:
             if random.randint(1, 10) % 3 == 0:
                 self.set_type('water')
@@ -79,9 +78,7 @@ class Tile(pygame.sprite.Sprite):
         files = [f for f in os.listdir(folder_path) if os.path.isfile(os.path.join(folder_path, f))]
         random_file = random.choice(files)
 
-        #image_path = os.path.join(folder_path, random_file)
         image_path = folder_path + "/" + random_file
-        #print(image_path)
         return image_path
 
     def set_type(self, type):
@@ -110,7 +107,6 @@ class Tile(pygame.sprite.Sprite):
         output = model(image)
         _, predicted = torch.max(output.data, 1)
 
-        #print("Recognized: ", classes[predicted.item()])
         result = classes[predicted.item()]
         if result == "ziemniak":
             result = 'marchew'
diff --git a/src/veggies_recognition/predict.py b/src/veggies_recognition/predict.py
deleted file mode 100644
index a81d4732..00000000
--- a/src/veggies_recognition/predict.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# import torch
-# import torchvision.transforms as transforms
-# from PIL import Image
-
-# classes = [
-#     "bób", "brokuł", "brukselka", "burak", "cebula",
-#     "cukinia", "dynia", "fasola", "groch", "jarmuż",
-#     "kalafior", "kalarepa", "kapusta", "marchew",
-#     "ogórek", "papryka", "pietruszka", "pomidor",
-#     "por", "rzepa", "rzodkiewka", "sałata", "seler",
-#     "szpinak", "ziemniak"]
-
-# model = torch.load("best_model.pth")
-
-# mean = [0.5322, 0.5120, 0.3696]
-# std = [0.2487, 0.2436, 0.2531]
-
-# image_transforms = transforms.Compose([
-#     transforms.Resize((224, 224)),
-#     transforms.ToTensor(),
-#     transforms.Normalize(torch.Tensor(mean),torch.Tensor(std))
-# ])
-
-# def predict(model, image_transforms, image_path, classes):
-#     model = model.eval()
-#     image = Image.open(image_path)
-#     print(image_path)
-#     image = image_transforms(image).float()
-#     image = image.unsqueeze(0)
-
-#     output = model(image)
-#     _, predicted = torch.max(output.data, 1)
-
-#     print(classes[predicted.item()])
-
-# predict(model, image_transforms, "veggies/marchew_118.jpg", classes)
\ No newline at end of file

From 856816e108427bbd16652a4f43fa582e4a100d16 Mon Sep 17 00:00:00 2001
From: Zofia Lorenc
Date: Sun, 9 Jun 2024 09:14:40 +0200
Subject: [PATCH 5/8] code cleaning

---
 .vscode/extensions.json                         |   3 +++
 src/tile.py                                     |   2 ++
 src/veggies_recognition/veggies/marchew_118.jpg | Bin 9442 -> 0 bytes
 3 files changed, 5 insertions(+)
 create mode 100644 .vscode/extensions.json
 delete mode 100644 src/veggies_recognition/veggies/marchew_118.jpg

diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 00000000..5ccd0950
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,3 @@
+{
+    "recommendations": ["sbsnippets.pytorch-snippets"]
+}
diff --git a/src/tile.py b/src/tile.py
index 71c82cee..41ab871c 100644
--- a/src/tile.py
+++ b/src/tile.py
@@ -108,6 +108,8 @@ class Tile(pygame.sprite.Sprite):
         _, predicted = torch.max(output.data, 1)
 
         result = classes[predicted.item()]
+
+
         if result == "ziemniak":
             result = 'marchew'
         return result
diff --git a/src/veggies_recognition/veggies/marchew_118.jpg b/src/veggies_recognition/veggies/marchew_118.jpg
deleted file mode 100644
index 62ba9ca7b88597dee53ac666e1dffd3285a35dfb..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 9442
[~9.4 kB of base85-encoded JPEG data omitted]
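Background note on the decision-tree model that the remaining patches exercise: the
diffs only ever show load_decision_tree_model() reading tree.csv with pandas and a
make_decision() helper being called, not how the classifier is built. Below is a
minimal sketch of such a training step, assuming (this is not shown anywhere in the
series) a scikit-learn DecisionTreeClassifier with one LabelEncoder per categorical
column:

    import pandas as pd
    from sklearn.preprocessing import LabelEncoder
    from sklearn.tree import DecisionTreeClassifier

    data = pd.read_csv('tree.csv')            # feature columns plus an action column
    label_encoders = {}
    for col in data.columns:
        if data[col].dtype == object:         # encode categorical values as integers
            label_encoders[col] = LabelEncoder()
            data[col] = label_encoders[col].fit_transform(data[col])

    X = data.iloc[:, :-1]                     # tile_type, water_level, plant_stage, neighbor_*
    y = data.iloc[:, -1]                      # action to take
    model = DecisionTreeClassifier().fit(X, y)

Under these assumptions, make_decision() would encode the live input_data dict
built in src/tractor.py with the same encoders and pass the encoded row to
model.predict().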
From b0ac67a1055a12eed0e3999d3441de05f83484a8 Mon Sep 17 00:00:00 2001
From: Adam Mikolajczak
Date: Sun, 9 Jun 2024 10:39:48 +0200
Subject: [PATCH 6/8] fix: Slightly changed tractor.py - also checking if
 branch works

---
 src/tractor.py | 47 ++++++++++++++++++++++++-----------------------
 1 file changed, 24 insertions(+), 23 deletions(-)

diff --git a/src/tractor.py b/src/tractor.py
index b070e0f0..fac7041e 100644
--- a/src/tractor.py
+++ b/src/tractor.py
@@ -32,11 +32,11 @@ class Tractor(pygame.sprite.Sprite):
 
         self.water = 50
 
-        # A-STAR
-        # came_from, total_cost = self.a_star()
-        # path = self.reconstruct_path(came_from)
-        # self.actions = self.recreate_actions(path)
-        # self.action_index = 0
+        #A-STAR
+        came_from, total_cost = self.a_star()
+        path = self.reconstruct_path(came_from)
+        self.actions = self.recreate_actions(path)
+        self.action_index = 0
 
         # DECISION TREE:
         self.label_encoders = {}
@@ -162,22 +162,8 @@ class Tractor(pygame.sprite.Sprite):
             self.move()
         else:
             self.move()
-
-    def update(self):
-        # A STAR:
-        # if self.action_index == len(self.actions):
-        #     return
-        # action = self.actions[self.action_index]
-
-        # match (action):
-        #     case ('move'):
-        #         self.move()
-        #     case ('left'):
-        #         self.rotate('left')
-        #     case ('right'):
-        #         self.rotate('right')
-
-        # DECISION TREE:
+
+    def decision_tree(self):
         action = self.make_decision()
         if (self.get_current_tile().type != 'grass' or self.get_current_tile().type == 'water'): action = 'move'
         self.prev_action = action
@@ -240,8 +226,22 @@ class Tractor(pygame.sprite.Sprite):
                 self.get_current_tile().set_type('szpinak')
             case ('plant(ziemniak)'):
                 self.get_current_tile().set_type('ziemniak')
-        self.move_2()
-        #self.action_index += 1
+
+    def update(self):
+        # A STAR:
+        if self.action_index == len(self.actions):
+            return
+        action = self.actions[self.action_index]
+
+        match (action):
+            case ('move'):
+                self.move()
+            case ('left'):
+                self.rotate('left')
+            case ('right'):
+                self.rotate('right')
+
+        self.action_index += 1
 
         if self.get_current_tile().type == "grass":
             print("Ground truth: grass")
         elif self.get_current_tile().type == "water":
             print("Ground truth: water")
         else:
             print("Recognized: ", self.get_current_tile().prediction)
             print("Ground truth: ", self.get_current_tile().type)
         print("\n")
 
         return
+
 
     def log_info(self):
         # print on what tile type the tractor is on
         x = self.rect.x // TILE_SIZE

From a6f51420e1dcd1eb07831e83ab1ccf59990eda43 Mon Sep 17 00:00:00 2001
From: Adam Mikolajczak
Date: Sun, 9 Jun 2024 11:11:29 +0200
Subject: [PATCH 7/8] feat: tractor goes to the tile clicked by the user.
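
The goal tile now comes from a mouse click instead of the FINAL_X/FINAL_Y
constants: the click position is mapped to grid coordinates and handed to A*
for replanning. The core of the change, as it appears in the diff below:

    x, y = pygame.mouse.get_pos()                  # pixel position of the click
    grid_x = x // TILE_SIZE                        # pixels -> tile column
    grid_y = y // TILE_SIZE                        # pixels -> tile row
    field.tractor.set_new_goal((grid_x, grid_y))   # rebuild the A* action plan

This also corrects the A* priority: the heuristic is now evaluated at the
successor node (manhattan_cost(next_node)) instead of the node being expanded,
giving the usual f(n) = g(n) + h(n) ordering of the fringe.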
---
 src/main.py    | 11 ++++++++++-
 src/tractor.py | 32 +++++++++++++++++---------------
 2 files changed, 27 insertions(+), 16 deletions(-)

diff --git a/src/main.py b/src/main.py
index 49196381..77fe2cdf 100644
--- a/src/main.py
+++ b/src/main.py
@@ -1,7 +1,7 @@
 import pygame
 from field import Field
 import os
-from config import TILE_SIZE, TICK_RATE
+from config import TILE_SIZE, TICK_RATE, FINAL_X, FINAL_Y
 
 if __name__ == "__main__":
     pygame.init()
@@ -13,10 +13,19 @@ if __name__ == "__main__":
     field = Field()
 
     running = True
+
     while running:
         for event in pygame.event.get():
             if event.type == pygame.QUIT:
                 running = False
+            if event.type == pygame.MOUSEBUTTONDOWN:
+                x, y = pygame.mouse.get_pos()
+                print(f"Mouse clicked at: ({x}, {y})")
+
+                grid_x = x // TILE_SIZE
+                grid_y = y // TILE_SIZE
+
+                field.tractor.set_new_goal((grid_x, grid_y))
 
         field.tractor.update()
         screen.fill(WHITE)
diff --git a/src/tractor.py b/src/tractor.py
index fac7041e..b76c7058 100644
--- a/src/tractor.py
+++ b/src/tractor.py
@@ -18,30 +18,33 @@ class Tractor(pygame.sprite.Sprite):
     def __init__(self, field):
         super().__init__
         self.field = field
-
+        self.water = 50
         self.image = pygame.image.load('images/tractor/east.png').convert_alpha()
         self.image = pygame.transform.scale(self.image, (TILE_SIZE, TILE_SIZE))
         self.rect = self.image.get_rect()
-
-        self.direction = STARTING_DIRECTION
-        # TODO: enable tractor to start on other tile than (0,0)
-        self.start = (START_X, START_Y)
-        self.final = (FINAL_X, FINAL_Y)
+        self.direction = 'east'
+        self.start = (0, 0)
+        self.final = (0, 0)
 
         print('destination @', self.final[0], self.final[1])
         self.rect.topleft = (self.start[0] * TILE_SIZE, self.start[1] * TILE_SIZE)
-        self.water = 50
-
-        #A-STAR
-        came_from, total_cost = self.a_star()
-        path = self.reconstruct_path(came_from)
-        self.actions = self.recreate_actions(path)
+        self.rect.topleft = (self.start[0] * TILE_SIZE, self.start[1] * TILE_SIZE)
+        self.actions = []
         self.action_index = 0
 
         # DECISION TREE:
         self.label_encoders = {}
         self.load_decision_tree_model()
 
+    def set_new_goal(self, goal):
+        self.start = self.get_coordinates()
+        self.final = goal
+        came_from, total_cost = self.a_star()
+        path = self.reconstruct_path(came_from)
+        self.actions = self.recreate_actions(path)
+        self.action_index = 0
+        print(f"New goal set to: {self.final}")
+
     def load_decision_tree_model(self):
         data = pd.read_csv('tree.csv')
@@ -97,13 +100,11 @@ class Tractor(pygame.sprite.Sprite):
     def draw(self, surface):
         surface.blit(self.image, self.rect)
-
     def get_coordinates(self):
         x = self.rect.x // TILE_SIZE
         y = self.rect.y // TILE_SIZE
         return (x,y)
-
     def move(self):
         if self.direction == "north" and self.rect.y > 0:
             self.rect.y -= TILE_SIZE
@@ -368,13 +371,12 @@ class Tractor(pygame.sprite.Sprite):
             if current == self.final:
                 break
 
-            # next_node: tuple[int, int]
             for next_node in self.neighboring_nodes(coordinates=current):
                 enter_cost = self.cost_of_entering_node(coordinates=next_node)
-                new_cost: int = cost_so_far[current] + enter_cost
+                new_cost = cost_so_far[current] + enter_cost
                 if next_node not in cost_so_far or new_cost < cost_so_far[next_node]:
                     cost_so_far[next_node] = new_cost
-                    priority = new_cost + self.manhattan_cost(current)
+                    priority = new_cost + self.manhattan_cost(next_node)
                     heapq.heappush(fringe, (priority, next_node))
                     came_from[next_node] = current

From 30878adeddb3be19bd6edf67983df89528caa734 Mon Sep 17 00:00:00 2001
From: Adam Mikolajczak
Date: Sun, 9 Jun 2024 11:26:14 +0200
Subject: [PATCH 8/8] feat: After arriving at the tile selected by the user,
 the tractor makes a decision using the decision tree. The decision is
 printed in the terminal.
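
update() still follows the A* action list; the new part is the arrival hook:
once the tractor's grid coordinates equal the clicked goal, the decision tree
is consulted for that tile, as in the diff below:

    if self.get_coordinates() == self.final:
        self.decision_tree()    # classify the tile photo, then choose an action

The tree's choice is only kept on grass tiles; everywhere else the action is
forced to 'nothing', and the chosen action is printed to the terminal.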
---
 src/tractor.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/tractor.py b/src/tractor.py
index b76c7058..ed2704b0 100644
--- a/src/tractor.py
+++ b/src/tractor.py
@@ -97,13 +97,11 @@ class Tractor(pygame.sprite.Sprite):
     def draw(self, surface):
         surface.blit(self.image, self.rect)
-
     def get_coordinates(self):
         x = self.rect.x // TILE_SIZE
         y = self.rect.y // TILE_SIZE
         return (x,y)
-
     def move(self):
         if self.direction == "north" and self.rect.y > 0:
             self.rect.y -= TILE_SIZE
@@ -162,12 +166,13 @@ class Tractor(pygame.sprite.Sprite):
 
     def decision_tree(self):
         action = self.make_decision()
-        if (self.get_current_tile().type != 'grass' or self.get_current_tile().type == 'water'): action = 'move'
+        if (self.get_current_tile().type != 'grass' or self.get_current_tile().type == 'water'): action = 'nothing'
        self.prev_action = action
+        print("Decision made by the decision tree: ", action)
 
         match (action):
-            case ('move'):
+            case ('nothing'):
                 pass
                 #self.move_rotating()
             case ('harvest'):
@@ -255,6 +254,9 @@ class Tractor(pygame.sprite.Sprite):
             print("Recognized: ", self.get_current_tile().prediction)
             print("Ground truth: ", self.get_current_tile().type)
         print("\n")
+
+        if self.get_coordinates() == self.final:
+            self.decision_tree()
 
         return
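
A standalone sanity check of the recognition path this series builds up. This
is a minimal sketch, assuming best_model.pth was saved as a whole pickled
model with torch.save(model, path) (which is how tile.py loads it); the sample
image path is illustrative only:

    import torch
    import torchvision.transforms as transforms
    from PIL import Image

    # Same 25 class labels as in src/tile.py.
    classes = [
        "bób", "brokuł", "brukselka", "burak", "cebula",
        "cukinia", "dynia", "fasola", "groch", "jarmuż",
        "kalafior", "kalarepa", "kapusta", "marchew",
        "ogórek", "papryka", "pietruszka", "pomidor",
        "por", "rzepa", "rzodkiewka", "sałata", "seler",
        "szpinak", "ziemniak"]

    model = torch.load("veggies_recognition/best_model.pth", map_location="cpu")
    model.eval()                                    # switch to inference mode

    transform = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize([0.5322, 0.5120, 0.3696],   # dataset mean, from tile.py
                             [0.2487, 0.2436, 0.2531]),  # dataset std
    ])

    # Hypothetical sample image; any RGB photo of a vegetable works.
    image = Image.open("veggies_recognition/veggies/testing/marchew/marchew_118.jpg").convert("RGB")
    batch = transform(image).unsqueeze(0)           # add the batch dimension

    with torch.no_grad():                           # no gradients needed for inference
        output = model(batch)
    print(classes[output.argmax(1).item()])         # e.g. "marchew"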