Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Example: Versioning

Datasets and ML model versioning example
[for Get Started](https://dvc.org/doc/get-started/example-versioning).
Datasets and ML model versioning
[getting started tutorial](https://dvc.org/doc/tutorials/versioning).
8 changes: 4 additions & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
tensorflow==1.13.1
keras==2.2.4
pillow==5.3.0

pillow>=5.3,<6
scipy>=1.3,<2
tensorflow>=2,<3
tqdm>=4,<5
19 changes: 14 additions & 5 deletions train.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,11 +42,13 @@
import json
import sys
import os
from tqdm import tqdm

from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dropout, Flatten, Dense
from tensorflow.keras import applications
from tensorflow.keras.callbacks import LambdaCallback, CSVLogger

pathname = os.path.dirname(sys.argv[0])
path = os.path.abspath(pathname)
Expand Down Expand Up @@ -111,11 +113,18 @@ def train_top_model():
model.compile(optimizer='rmsprop',
loss='binary_crossentropy', metrics=['accuracy'])

history = model.fit(train_data, train_labels,
epochs=epochs,
batch_size=batch_size,
validation_data=(validation_data, validation_labels))
json.dump(history.history, open("metrics.json", 'w'))
with tqdm(total=epochs, unit='epoch') as t:
def progress_epoch(_, logs=None):
if logs:
t.set_postfix(logs, refresh=False)
t.update()
model.fit(train_data, train_labels,
epochs=epochs,
batch_size=batch_size,
validation_data=(validation_data, validation_labels),
verbose=0,
callbacks=[LambdaCallback(on_epoch_end=progress_epoch),
CSVLogger("metrics.csv")])
model.save_weights(top_model_weights_path)


Expand Down