cleanup + jenkinsfile update
Parent: 863a84ac18
Commit: 405d577aac
@@ -16,9 +16,11 @@ ENV LC_ALL en_US.UTF-8
 ENV LANG en_US.UTF-8
 ENV LANGUAGE en_US.UTF-8
 
-RUN python3 -m pip --version
 RUN python3 -m pip install kaggle
 RUN python3 -m pip install pandas
+RUN python3 -m pip install wheel --no-deps -U
+RUN python3 -m pip install torch
+RUN python3 -m pip install sklearn
 RUN python3 -m pip freeze
 
 ENV PATH="/root/.local/bin:${PATH}"
@@ -31,4 +33,5 @@ RUN chmod a+x ./stats-docker.sh
 RUN chmod a+x ./script-stats.py
 
 # RUN ./download.sh 117928
 RUN python3 ./script-download.py
+# RUN python3 ./lab05_deepLearning.py
Jenkinsfile (vendored): 44 lines removed
@@ -1,44 +0,0 @@
-pipeline {
-    agent any
-    parameters {
-        string(
-            defaultValue: 'heatedboss2',
-            description: 'Kaggle username',
-            name: 'KAGGLE_USERNAME',
-            trim: false
-        )
-        password(
-            defaultValue: '',
-            description: 'Kaggle token',
-            name: 'KAGGLE_KEY'
-        )
-        string(
-            defaultValue: '1',
-            description: 'Cutoff',
-            name: 'CUTOFF',
-            trim: false
-        )
-    }
-    stages {
-        stage('Checkout') {
-            steps {
-                checkout([$class: 'GitSCM', branches: [
-                    [name: '*/master']
-                ], extensions: [], userRemoteConfigs: [
-                    [credentialsId: '8b8d54ee-f03c-4980-90b1-959faa97082b', url: 'https://git.wmi.amu.edu.pl/s444507/ium_444507.git']
-                ]])
-            }
-        }
-        stage('Script') {
-            steps {
-                script {
-                    withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}", "KAGGLE_KEY=${params.KAGGLE_KEY}", "CUTOFF=${params.CUTOFF}"]) {
-                        sh 'echo KAGGLE_USERNAME: $KAGGLE_USERNAME'
-                        sh './download.sh $CUTOFF > ./script_logs.txt'
-                        archiveArtifacts artifacts: 'car_prices.csv.dev, car_prices.csv.test, car_prices.csv.train, script_logs.txt', followSymlinks: false
-                    }
-                }
-            }
-        }
-    }
-}
Jenkinsfile2: 19 lines removed
@@ -1,19 +0,0 @@
-pipeline {
-    agent any
-    parameters {
-        buildSelector(
-            defaultSelector: lastSuccessful(),
-            name: 'BUILD_SELECTOR',
-            description: 'Which build to use for copying artifacts'
-        )
-    }
-    stages {
-        stage("Script") {
-            steps {
-                copyArtifacts fingerprintArtifacts: true, projectName: 's444507-create-dataset', selector: buildParameter('BUILD_SELECTOR')
-                sh './stats.sh'
-                archiveArtifacts artifacts: 'stats.txt'
-            }
-        }
-    }
-}
@@ -11,7 +11,13 @@ pipeline {
         stage('Show stats') {
             steps {
                 sh "python3 ./script-stats.py"
+                sh "python3 ./lab05_deepLearning.py"
             }
         }
     }
+    post {
+        success {
+            archiveArtifacts artifacts: 'Car_Prices_Poland_Kaggle*', followSymlinks: false
+        }
+    }
 }
@@ -1,4 +0,0 @@
-#!/bin/bash
-while read line; do
-    figlet "$line"
-done
@@ -8,16 +8,15 @@ from sklearn.metrics import accuracy_score
 import torch.nn.functional as F
 import pandas as pd
 from sklearn import preprocessing
-import matplotlib.pyplot as plt
+# import matplotlib.pyplot as plt
 
 
 class Model(nn.Module):
     def __init__(self, input_dim):
         super(Model, self).__init__()
-        self.layer1 = nn.Linear(input_dim, 160)
-        # self.layer2 = nn.Linear(320, 160)
-        self.layer2 = nn.Linear(160, 80)
-        self.layer3 = nn.Linear(80, 5)
+        self.layer1 = nn.Linear(input_dim, 100)
+        self.layer2 = nn.Linear(100, 60)
+        self.layer3 = nn.Linear(60, 5)
 
     def forward(self, x):
         x = F.relu(self.layer1(x))
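
Pulled out of diff form, the resized network is small enough to show whole. The following is a minimal standalone sketch, not the file's exact contents: the hunk only shows the first ReLU in forward(), so the rest of the forward pass here is an assumption.

# Standalone sketch of the resized Model (input_dim -> 100 -> 60 -> 5).
# Only the first ReLU appears in the diff; the remaining forward() lines are assumed.
import torch
import torch.nn as nn
import torch.nn.functional as F


class Model(nn.Module):
    def __init__(self, input_dim):
        super(Model, self).__init__()
        self.layer1 = nn.Linear(input_dim, 100)  # previously input_dim -> 160
        self.layer2 = nn.Linear(100, 60)         # previously 160 -> 80
        self.layer3 = nn.Linear(60, 5)           # previously 80 -> 5

    def forward(self, x):
        x = F.relu(self.layer1(x))
        x = F.relu(self.layer2(x))   # assumed, mirroring layer1
        return self.layer3(x)        # raw logits, suitable for CrossEntropyLoss


# Shape check with random data: 4 samples, 7 features -> 4 x 5 class scores.
print(Model(7)(torch.randn(4, 7)).shape)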
@@ -61,6 +60,12 @@ def prepare_dataset_raw(dataset):
     return lab, feat
 
 
+# def draw_plot(lbl):
+#     need to import matplotlib to work
+#     plt.hist(lbl, bins=[i for i in range(len(set(lbl)))], edgecolor="black")
+#     plt.xticks(np.arange(0, len(set(lbl)), 1))
+#     plt.show()
+
 # Prepare dataset
 print("Loading dataset...")
 dataset = load_dataset_raw()
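
The plotting helper comes back only as comments, with a note that matplotlib would have to be imported again. If it is ever re-enabled, a working version would look roughly like the sketch below; the two imports are the assumption, the body is the commented code as-is.

# Re-enabled version of the commented-out draw_plot helper; matplotlib and numpy
# must be imported because the script now comments out the matplotlib import.
import matplotlib.pyplot as plt
import numpy as np


def draw_plot(lbl):
    # One bin edge per distinct label value, drawn as a histogram with black edges.
    plt.hist(lbl, bins=[i for i in range(len(set(lbl)))], edgecolor="black")
    plt.xticks(np.arange(0, len(set(lbl)), 1))
    plt.show()


# Example: class-frequency histogram for a small list of integer labels.
draw_plot([0, 0, 1, 2, 2, 2, 3, 4])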
@@ -71,16 +76,14 @@ dataset = remove_rows(dataset)
 labels, features = prepare_dataset_raw(dataset)
 print("Dataset prepared")
 
-plot = plt.hist(labels, bins=[i for i in range(len(set(labels)))], edgecolor="black")
-plt.xticks(np.arange(0, len(set(labels)), 1))
-plt.show()
 features_train, features_test, labels_train, labels_test = train_test_split(features, labels, random_state=42,
                                                                             shuffle=True)
 # Training
 model = Model(features_train.shape[1])
 optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
 loss_fn = nn.CrossEntropyLoss()
-epochs = 100
+epochs = 1000
 
 print("Starting model training...")
 x_train, y_train = Variable(torch.from_numpy(features_train)).float(), Variable(torch.from_numpy(labels_train)).long()
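
Only the hyperparameters are visible in this hunk: Adam at lr=0.01, CrossEntropyLoss, and epochs raised from 100 to 1000. The loop that consumes them is not shown, so the version below is a hypothetical sketch of the usual pattern, with a stand-in model and random stand-in data in the same dtypes the script uses (float features, long labels); the script's real loop may differ.

# Hypothetical training loop around the pieces visible in the diff.
import numpy as np
import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(7, 100), nn.ReLU(), nn.Linear(100, 60), nn.ReLU(), nn.Linear(60, 5))
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
loss_fn = nn.CrossEntropyLoss()
epochs = 1000

# Random stand-in data: 64 samples, 7 features, 5 classes.
x_train = torch.from_numpy(np.random.rand(64, 7).astype(np.float32)).float()
y_train = torch.from_numpy(np.random.randint(0, 5, size=64)).long()

for epoch in range(epochs):
    optimizer.zero_grad()            # clear gradients from the previous step
    y_pred = model(x_train)          # forward pass: logits of shape (64, 5)
    loss = loss_fn(y_pred, y_train)  # cross-entropy against integer labels
    loss.backward()                  # backpropagate
    optimizer.step()                 # Adam update
    if epoch % 100 == 0:
        print(f"epoch {epoch}: loss {loss.item():.4f}")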
@@ -14,6 +14,7 @@ def unzip_package():
     os.system('rm ./car-prices-poland.zip')
     print('Zip file removed')
 
+
 def download_dataset():
     """Download kaggle dataset."""
     print('Downloading dataset...')
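
Only the docstring and the first print of download_dataset() appear in this hunk. Since the image installs the kaggle package, the deleted Jenkinsfile passed KAGGLE_USERNAME and KAGGLE_KEY through the environment, and unzip_package() already shells out with os.system, the body presumably calls the kaggle CLI along these lines; the dataset slug and the exact command are assumptions, not the file's contents.

# Hypothetical body for download_dataset(); the slug is an assumption inferred
# from the car-prices-poland.zip filename removed in unzip_package().
import os


def download_dataset():
    """Download kaggle dataset."""
    print('Downloading dataset...')
    # Needs KAGGLE_USERNAME and KAGGLE_KEY in the environment
    # (the deleted Jenkinsfile provided both via withEnv).
    os.system('kaggle datasets download -d aleksandrglotov/car-prices-poland')
    print('Download finished')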