Compare commits
8 Commits
Author | SHA1 | Date
---|---|---
 | 70e774b2fa |
 | 9614bea42a |
 | 6e51e6f7d4 |
 | 8834036711 |
 | 8e9f37aaff |
 | cd670b95ea |
 | 1f42dd374e |
 | b20f390d38 |
.dvc/.gitignore (vendored, 3 lines changed)
@@ -1,3 +0,0 @@
/config.local
/tmp
/cache
.dvc/config (4 lines changed)
@@ -1,4 +0,0 @@
[core]
remote = ium_ssh_remote
['remote "ium_ssh_remote"']
url = ssh://ium-sftp@tzietkiewicz.vm.wmi.amu.edu.pl
.dvcignore (3 lines changed)
@@ -1,3 +0,0 @@
# Add patterns of files dvc should ignore, which could improve
# the performance. Learn more at
# https://dvc.org/doc/user-guide/dvcignore
.gitignore (vendored, 2 lines changed)
@@ -1,2 +0,0 @@
/Spotify_Dataset.csv
/spotify_songs.csv
.ipynb_checkpoints/Dockerfile-checkpoint (new file, 17 lines)
@@ -0,0 +1,17 @@
FROM ubuntu:latest

RUN apt-get update && \
    apt-get install -y \
    python3 \
    python3-pip \
    wget \
    unzip \
    && rm -rf /var/lib/apt/lists/*

RUN pip3 install pandas scikit-learn requests numpy

WORKDIR /app

COPY model_creator.py /app/

RUN chmod +x model_creator.py
.ipynb_checkpoints/Jenkinsfile-checkpoint (new file, 39 lines)
@@ -0,0 +1,39 @@
pipeline {
    agent any

    triggers {
        upstream(upstreamProjects: 'z-s464953-create-dataset', threshold: hudson.model.Result.SUCCESS)
    }

    parameters {
        string(name: 'TEST_SIZE', defaultValue: '0.10', description: 'Size of test dataset')
        string(name: 'MAX_ITER', defaultValue: '1000', description: 'Max number of iterations')
        buildSelector(defaultSelector: lastSuccessful(), description: 'Which build to use for copying artifacts', name: 'BUILD_SELECTOR')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git branch: 'training', url: 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }
        stage('Copy Artifacts') {
            steps {
                copyArtifacts filter: 'artifacts/*', projectName: 'z-s464953-create-dataset', selector: buildParameter('BUILD_SELECTOR')
            }
        }
        stage("Run Docker") {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    reuseNode true
                }
            }
            steps {

                sh "python3 /app/model_creator.py ${params.TEST_SIZE} ${params.MAX_ITER}"
                archiveArtifacts artifacts: '/app/model.pkl', onlyIfSuccessful: true
            }
        }
    }
}
@@ -10,9 +10,27 @@ from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import LabelEncoder
import pickle

def check_datasets_presence():

    dataset_1 = "Spotify_Dataset.csv"
    dataset_2 = "spotify_songs.csv"
    destination_folder = "artifacts"

    if not os.path.exists(destination_folder):
        raise FileNotFoundError(destination_folder + " folder not found")
    if dataset_1 in os.listdir("/."):
        shutil.move(dataset_1, destination_folder)
    elif dataset_1 not in os.listdir(destination_folder):
        raise FileNotFoundError(dataset_1 + " not found")

    if dataset_2 in os.listdir("/."):
        shutil.move(dataset_2, destination_folder)
    elif dataset_2 not in os.listdir(destination_folder):
        raise FileNotFoundError(dataset_2 + " not found")

def datasets_preparation():
    df_1 = pd.read_csv("spotify_songs.csv")
    df_2 = pd.read_csv("Spotify_Dataset.csv", sep=";")
    df_1 = pd.read_csv("artifacts/spotify_songs.csv")
    df_2 = pd.read_csv("artifacts/Spotify_Dataset.csv", sep=";")

    df_1 = df_1.dropna()
    df_2 = df_2.dropna()
@@ -42,8 +60,8 @@ def datasets_preparation():

    #df_1 = df_1.iloc[20:]

    if "docker_test_dataset.csv" not in os.listdir():
        diff_df.to_csv("docker_test_dataset.csv", index=False)
    if "docker_test_dataset.csv" not in os.listdir("artifacts"):
        diff_df.to_csv("artifacts/docker_test_dataset.csv", index=False)

    result_df = pd.merge(df_1, df_2, on='track_name', how='inner')
    result_df = result_df.drop_duplicates(subset=['track_name'])
@@ -57,6 +75,8 @@ def datasets_preparation():

    return result_df


check_datasets_presence()
result_df = datasets_preparation()
Y = result_df[['playlist_genre']]
X = result_df.drop(columns='playlist_genre')
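For orientation, here is a minimal, hypothetical sketch of the training step that would turn the X/Y split above into the model.pkl archived by the Jenkins pipelines. The LogisticRegression estimator, the 0.1 test split and the 1000-iteration cap are taken from the MLflow run parameters recorded later in this diff; the numeric-feature assumption and the random_state are illustrative, not the repository's actual code.

# Hypothetical sketch only -- not the repository's model_creator.py.
# Assumes X already holds numeric features and Y the 'playlist_genre'
# labels produced by datasets_preparation() above.
import pickle
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(
    X, Y.values.ravel(), test_size=0.1, random_state=42)

model = LogisticRegression(max_iter=1000)   # mirrors the logged max_iter parameter
model.fit(X_train, y_train)
print("test accuracy:", model.score(X_test, y_test))

with open("model.pkl", "wb") as f:          # the artifact archived by the pipeline
    pickle.dump(model, f)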
Dockerfile
@@ -4,9 +4,14 @@ RUN apt-get update && \
    apt-get install -y \
    python3 \
    python3-pip \
    git \
    wget \
    unzip \
    && rm -rf /var/lib/apt/lists/*

RUN pip3 install pandas scikit-learn requests kaggle numpy sacred pymongo --break-system-package
RUN pip3 install pandas scikit-learn requests numpy

WORKDIR /app

COPY model_creator.py /app/

RUN chmod +x model_creator.py
Jenkinsfile (vendored, 39 lines changed)
@@ -1,39 +1,38 @@
pipeline {
    agent any

    triggers {
        upstream(upstreamProjects: 'z-s464953-create-dataset', threshold: hudson.model.Result.SUCCESS)
    }

    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: 'gulczas', description: 'Kaggle username')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API key')
        string(name: 'TEST_SIZE', defaultValue: '0.10', description: 'Size of test dataset')
        string(name: 'MAX_ITER', defaultValue: '1000', description: 'Max number of iterations')
        buildSelector(defaultSelector: lastSuccessful(), description: 'Which build to use for copying artifacts', name: 'BUILD_SELECTOR')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
                git branch: 'training', url: 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }
        stage('Cleanup Artifacts') {
        stage('Copy Artifacts') {
            steps {
                script {
                    sh 'rm -rf artifacts'
                    copyArtifacts filter: 'artifacts/*', projectName: 'z-s464953-create-dataset', selector: buildParameter('BUILD_SELECTOR')
                }
            }
        stage("Run Docker") {
            agent {
                dockerfile {
                    filename 'Dockerfile'
                    reuseNode true
                }
            }
        stage('Run Script') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"])
                    {
                        sh "bash ./download_dataset.sh"
                    }
                }
            }
        }
        stage('Archive Artifacts') {
            steps {
                archiveArtifacts artifacts: 'artifacts/*', onlyIfSuccessful: true

                sh "python3 /app/model_creator.py ${params.TEST_SIZE} ${params.MAX_ITER}"
                archiveArtifacts artifacts: 'model.pkl, artifacts/docker_test_dataset.csv', onlyIfSuccessful: true
            }
        }
    }
@@ -1,57 +0,0 @@
pipeline {
    agent any

    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: 'gulczas', description: 'Kaggle username')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API key')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }

        stage('Stop and remove existing container') {
            steps {
                script {
                    sh "docker stop s464953 || true"
                    sh "docker rm s464953 || true"
                }
            }
        }

        stage('Build Docker image') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"
                    ]) {
                        sh "docker build --build-arg KAGGLE_USERNAME=$KAGGLE_USERNAME --build-arg KAGGLE_KEY=$KAGGLE_KEY -t s464953 ."
                    }
                }
            }
        }

        stage('Run Docker container') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"
                    ]) {
                        sh "docker run --name s464953 -e KAGGLE_USERNAME=$KAGGLE_USERNAME -e KAGGLE_KEY=$KAGGLE_KEY -v ${WORKSPACE}:/app s464953"
                    }
                }
            }
        }

        stage('Archive stats.txt artifact') {
            steps {
                archiveArtifacts artifacts: 'stats.txt', allowEmptyArchive: true
            }
        }
    }
}
@@ -1,44 +0,0 @@
pipeline {
    agent any

    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: 'gulczas', description: 'Kaggle username')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API key')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }

        stage('Stop and remove existing container') {
            steps {
                script {
                    sh "docker stop s464953 || true"
                    sh "docker rm s464953 || true"
                }
            }
        }

        stage('Run Docker container') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"
                    ]) {
                        sh "docker run --name s464953 -e KAGGLE_USERNAME=$KAGGLE_USERNAME -e KAGGLE_KEY=$KAGGLE_KEY -v ${WORKSPACE}:/app michalgulczynski/ium_s464953:1.0"
                    }
                }
            }
        }

        stage('Archive stats.txt artifact') {
            steps {
                archiveArtifacts artifacts: 'stats.txt', allowEmptyArchive: true
            }
        }
    }
}
@@ -1,57 +0,0 @@
pipeline {
    agent any

    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: 'gulczas', description: 'Kaggle username')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API key')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }

        stage('Stop and remove existing container') {
            steps {
                script {
                    sh "docker stop s464953 || true"
                    sh "docker rm s464953 || true"
                }
            }
        }

        stage('Build Docker image') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"
                    ]) {
                        sh "docker build --build-arg KAGGLE_USERNAME=$KAGGLE_USERNAME --build-arg KAGGLE_KEY=$KAGGLE_KEY -t s464953 ."
                    }
                }
            }
        }

        stage('Run Docker container') {
            steps {
                script {
                    withEnv([
                        "KAGGLE_USERNAME=${env.KAGGLE_USERNAME}",
                        "KAGGLE_KEY=${env.KAGGLE_KEY}"
                    ]) {
                        sh "docker run --name s464953 -e KAGGLE_USERNAME=$KAGGLE_USERNAME -e KAGGLE_KEY=$KAGGLE_KEY -v ${WORKSPACE}:/app s464953"
                    }
                }
            }
        }

        stage('Archive stats.txt artifact') {
            steps {
                archiveArtifacts artifacts: 'model.pkl', allowEmptyArchive: true
            }
        }
    }
}
@@ -1,42 +0,0 @@
pipeline {
    agent any

    parameters {
        buildSelector( defaultSelector: lastSuccessful(), description: 'Build for copying artifacts', name: 'BUILD_SELECTOR')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }
        stage('Cleanup Artifacts') {
            steps {
                script {
                    sh 'rm -rf artifacts'
                }
            }
        }
        stage('Copy Artifact') {
            steps {
                withEnv([
                    "BUILD_SELECTOR=${params.BUILD_SELECTOR}"
                ]) {
                    copyArtifacts fingerprintArtifacts: true, projectName: 'z-s464953-create-dataset', selector: buildParameter('$BUILD_SELECTOR')}
                }
            }
        stage('Execute Shell Script') {
            steps {
                script {
                    sh "bash ./dataset_stats.sh"
                }
            }
        }
        stage('Archive Results') {
            steps {
                archiveArtifacts artifacts: 'artifacts/*', onlyIfSuccessful: true
            }
        }
    }
}
@@ -1,50 +0,0 @@
pipeline {
    agent any

    parameters {
        string(name: 'KAGGLE_USERNAME', defaultValue: 'gulczas', description: 'Kaggle username')
        password(name: 'KAGGLE_KEY', defaultValue: '', description: 'Kaggle API key')
    }

    stages {
        stage('Clone Repository') {
            steps {
                git 'https://git.wmi.amu.edu.pl/s464953/ium_464953.git'
            }
        }

        stage('Download datasets') {
            steps {
                withEnv(["KAGGLE_USERNAME=${params.KAGGLE_USERNAME}", "KAGGLE_KEY=${params.KAGGLE_KEY}"]) {
                    sh "bash ./download_dataset.sh"
                }
            }
        }

        stage('Build and Run Experiments') {
            agent {
                dockerfile {
                    reuseNode true
                }
            }

            environment {
                KAGGLE_USERNAME = "${params.KAGGLE_USERNAME}"
                KAGGLE_KEY = "${params.KAGGLE_KEY}"
            }

            steps {
                sh 'chmod +x sacred/sacred_model_creator.py'
                sh 'python3 sacred/sacred_model_creator.py'
                sh 'chmod +x sacred/sacred_use_model.py'
                sh 'python3 sacred/sacred_use_model.py'
            }
        }

        stage('Archive Artifacts from Experiments') {
            steps {
                archiveArtifacts artifacts: 'my_experiment_logs/**', allowEmptyArchive: true
            }
        }
    }
}
@@ -1,11 +0,0 @@
name: MLflow Example

conda_env: conda.yaml

entry_points:
  main:
    command: "python model_creator.py {max_iter}"
    parameters:
      max_iter: {type: int, default: 1000}
  test:
    command: "python use_model.py"
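The MLproject file removed above wires model_creator.py and use_model.py into MLflow entry points. As a rough, hedged illustration, such a project is typically driven through MLflow's projects API; the project path "." and the max_iter value below are placeholder assumptions, not values taken from this repository.

# Hedged sketch of invoking an MLproject like the one deleted above.
import mlflow

# "main" entry point -> python model_creator.py {max_iter}
mlflow.projects.run(uri=".", entry_point="main", parameters={"max_iter": 500})

# "test" entry point -> python use_model.py
mlflow.projects.run(uri=".", entry_point="test")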
File diff suppressed because it is too large
@@ -1,11 +0,0 @@
name: Spotify genre recognition - s464953
channels:
  - defaults
dependencies:
  - python=3.9
  - pip
  - pip:
    - mlflow
    - pandas
    - scikit-learn
    - numpy
File diff suppressed because it is too large
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: 9026270861774aad82aee9fc231054b4
run_id: 04eba1c93f6a4510b4487ad0789fa76f
utc_time_created: '2024-05-13 21:25:05.523657'
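For reference, a minimal sketch of how a model described by an MLmodel file like the one above would typically be loaded back; it assumes the local mlruns store from this diff is still reachable by the MLflow client, and the input dataframe is a placeholder.

# Hedged sketch: load the sklearn model logged under run 04eba1c93f6a4510b4487ad0789fa76f.
import mlflow.pyfunc

model = mlflow.pyfunc.load_model("runs:/04eba1c93f6a4510b4487ad0789fa76f/model")
# predictions = model.predict(features_dataframe)   # placeholder input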
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: 9026270861774aad82aee9fc231054b4
run_id: 04eba1c93f6a4510b4487ad0789fa76f
utc_time_created: '2024-05-13 21:25:05.523657'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
Binary file not shown.
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
@@ -1,15 +0,0 @@
artifact_uri: file:///D:/studia/inzynieria%20uczenia%20maszynowego/ium_464953/MLProject/mlruns/0/04eba1c93f6a4510b4487ad0789fa76f/artifacts
end_time: 1715635510283
entry_point_name: ''
experiment_id: '0'
lifecycle_stage: active
run_id: 04eba1c93f6a4510b4487ad0789fa76f
run_name: valuable-goat-689
run_uuid: 04eba1c93f6a4510b4487ad0789fa76f
source_name: ''
source_type: 4
source_version: ''
start_time: 1715635487472
status: 3
tags: []
user_id: Michał
@@ -1 +0,0 @@
1715635505497 0.4782608695652174 0
@@ -1 +0,0 @@
1000
@@ -1 +0,0 @@
LogisticRegression
@@ -1 +0,0 @@
0.1
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
[{"run_id": "04eba1c93f6a4510b4487ad0789fa76f", "artifact_path": "model", "utc_time_created": "2024-05-13 21:25:05.523657", "flavors": {"python_function": {"model_path": "model.pkl", "predict_fn": "predict", "loader_module": "mlflow.sklearn", "python_version": "3.9.19", "env": {"conda": "conda.yaml", "virtualenv": "python_env.yaml"}}, "sklearn": {"pickled_model": "model.pkl", "sklearn_version": "1.4.2", "serialization_format": "cloudpickle", "code": null}}, "model_uuid": "9026270861774aad82aee9fc231054b4", "mlflow_version": "2.12.2", "model_size_bytes": 1446}]
@@ -1 +0,0 @@
local
@@ -1 +0,0 @@
main
@@ -1 +0,0 @@
conda
@@ -1 +0,0 @@
valuable-goat-689
@@ -1 +0,0 @@
390d6b118b45f3613f049b5cf665ff66ca00cbd5
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
file://D:\studia\inzynieria uczenia maszynowego\ium_464953#\MLProject
@@ -1 +0,0 @@
PROJECT
@@ -1 +0,0 @@
Michał
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: b733a1b574ba4815ac1f2887d47fe45c
run_id: 2e98f71c04cd4e21a26b13ae9daaf43b
utc_time_created: '2024-05-13 21:21:21.420484'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: b733a1b574ba4815ac1f2887d47fe45c
run_id: 2e98f71c04cd4e21a26b13ae9daaf43b
utc_time_created: '2024-05-13 21:21:21.420484'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
Binary file not shown.
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
@@ -1,15 +0,0 @@
artifact_uri: file:///D:/studia/inzynieria%20uczenia%20maszynowego/ium_464953/MLProject/mlruns/0/2e98f71c04cd4e21a26b13ae9daaf43b/artifacts
end_time: 1715635286846
entry_point_name: ''
experiment_id: '0'
lifecycle_stage: active
run_id: 2e98f71c04cd4e21a26b13ae9daaf43b
run_name: illustrious-shark-67
run_uuid: 2e98f71c04cd4e21a26b13ae9daaf43b
source_name: ''
source_type: 4
source_version: ''
start_time: 1715635260477
status: 3
tags: []
user_id: Michał
@@ -1 +0,0 @@
1715635281395 0.4782608695652174 0
@@ -1 +0,0 @@
1000
@@ -1 +0,0 @@
LogisticRegression
@@ -1 +0,0 @@
0.1
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
[{"run_id": "2e98f71c04cd4e21a26b13ae9daaf43b", "artifact_path": "model", "utc_time_created": "2024-05-13 21:21:21.420484", "flavors": {"python_function": {"model_path": "model.pkl", "predict_fn": "predict", "loader_module": "mlflow.sklearn", "python_version": "3.9.19", "env": {"conda": "conda.yaml", "virtualenv": "python_env.yaml"}}, "sklearn": {"pickled_model": "model.pkl", "sklearn_version": "1.4.2", "serialization_format": "cloudpickle", "code": null}}, "model_uuid": "b733a1b574ba4815ac1f2887d47fe45c", "mlflow_version": "2.12.2", "model_size_bytes": 1446}]
@@ -1 +0,0 @@
local
@@ -1 +0,0 @@
main
@@ -1 +0,0 @@
conda
@@ -1 +0,0 @@
illustrious-shark-67
@@ -1 +0,0 @@
390d6b118b45f3613f049b5cf665ff66ca00cbd5
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
file://D:\studia\inzynieria uczenia maszynowego\ium_464953#\MLProject
@@ -1 +0,0 @@
PROJECT
@@ -1 +0,0 @@
Michał
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: 89ad4cf7b9e7444ea84049ba5d88fdb8
run_id: 71242ca0b6f446d89f411c36212b6761
utc_time_created: '2024-05-13 20:57:47.221852'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: 89ad4cf7b9e7444ea84049ba5d88fdb8
run_id: 71242ca0b6f446d89f411c36212b6761
utc_time_created: '2024-05-13 20:57:47.221852'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
Binary file not shown.
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
@@ -1,15 +0,0 @@
artifact_uri: file:///D:/studia/inzynieria%20uczenia%20maszynowego/ium_464953/MLProject/mlruns/0/71242ca0b6f446d89f411c36212b6761/artifacts
end_time: 1715633872371
entry_point_name: ''
experiment_id: '0'
lifecycle_stage: active
run_id: 71242ca0b6f446d89f411c36212b6761
run_name: industrious-gull-774
run_uuid: 71242ca0b6f446d89f411c36212b6761
source_name: ''
source_type: 4
source_version: ''
start_time: 1715633850262
status: 3
tags: []
user_id: Michał
@@ -1 +0,0 @@
1715633867196 0.4782608695652174 0
@@ -1 +0,0 @@
1000
@@ -1 +0,0 @@
LogisticRegression
@@ -1 +0,0 @@
0.1
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
[{"run_id": "71242ca0b6f446d89f411c36212b6761", "artifact_path": "model", "utc_time_created": "2024-05-13 20:57:47.221852", "flavors": {"python_function": {"model_path": "model.pkl", "predict_fn": "predict", "loader_module": "mlflow.sklearn", "python_version": "3.9.19", "env": {"conda": "conda.yaml", "virtualenv": "python_env.yaml"}}, "sklearn": {"pickled_model": "model.pkl", "sklearn_version": "1.4.2", "serialization_format": "cloudpickle", "code": null}}, "model_uuid": "89ad4cf7b9e7444ea84049ba5d88fdb8", "mlflow_version": "2.12.2", "model_size_bytes": 1446}]
@@ -1 +0,0 @@
local
@@ -1 +0,0 @@
main
@@ -1 +0,0 @@
conda
@@ -1 +0,0 @@
industrious-gull-774
@@ -1 +0,0 @@
390d6b118b45f3613f049b5cf665ff66ca00cbd5
@@ -1 +0,0 @@
https://git.wmi.amu.edu.pl/s464953/ium_464953.git
@@ -1 +0,0 @@
file://D:\studia\inzynieria uczenia maszynowego\ium_464953#\MLProject
@@ -1 +0,0 @@
PROJECT
@@ -1 +0,0 @@
Michał
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: c575ab1b63c840b1b87f2c5d6a51721c
run_id: ef10e2199a2346dabe10eb9e7bdea061
utc_time_created: '2024-05-13 20:51:58.533911'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,20 +0,0 @@
artifact_path: model
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.sklearn
    model_path: model.pkl
    predict_fn: predict
    python_version: 3.9.19
  sklearn:
    code: null
    pickled_model: model.pkl
    serialization_format: cloudpickle
    sklearn_version: 1.4.2
mlflow_version: 2.12.2
model_size_bytes: 1446
model_uuid: c575ab1b63c840b1b87f2c5d6a51721c
run_id: ef10e2199a2346dabe10eb9e7bdea061
utc_time_created: '2024-05-13 20:51:58.533911'
@@ -1,15 +0,0 @@
channels:
- conda-forge
dependencies:
- python=3.9.19
- pip<=24.0
- pip:
  - mlflow==2.12.2
  - cloudpickle==3.0.0
  - numpy==1.26.4
  - packaging==23.1
  - psutil==5.9.5
  - pyyaml==6.0.1
  - scikit-learn==1.4.2
  - scipy==1.13.0
name: mlflow-env
@@ -1,7 +0,0 @@
python: 3.9.19
build_dependencies:
- pip==24.0
- setuptools
- wheel==0.43.0
dependencies:
- -r requirements.txt
@@ -1,8 +0,0 @@
mlflow==2.12.2
cloudpickle==3.0.0
numpy==1.26.4
packaging==23.1
psutil==5.9.5
pyyaml==6.0.1
scikit-learn==1.4.2
scipy==1.13.0
Binary file not shown.
Some files were not shown because too many files have changed in this diff.