current status

Patrick 2024-07-03 00:42:21 +02:00
commit 6031c15723
12490 changed files with 13035 additions and 0 deletions

AImageDataset.py Normal file

@@ -0,0 +1,50 @@
import torch.utils.data
import numpy as np
import numpy.random as npr
import torchvision


class AImagesDataset(torch.utils.data.Dataset):
    @staticmethod
    def augment_tensor(t: torch.Tensor, i: int) -> tuple[torch.Tensor, str]:
        match i % 7:
            case 0:
                return t, "Original"
            case 1:
                return torchvision.transforms.GaussianBlur(kernel_size=5).forward(t), "GaussianBlur"
            case 2:
                return torchvision.transforms.RandomRotation(degrees=180).forward(t), "RandomRotation"
            case 3:
                return torchvision.transforms.RandomVerticalFlip().forward(t), "RandomVerticalFlip"
            case 4:
                return torchvision.transforms.RandomHorizontalFlip().forward(t), "RandomHorizontalFlip"
            case 5:
                return torchvision.transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2,
                                                          hue=0.1).forward(t), "ColorJitter"
            case 6:
                # Compose: chain three of the single transforms (cases 1-5) at random.
                rng = npr.default_rng()
                return AImagesDataset.augment_tensor(
                    AImagesDataset.augment_tensor(
                        AImagesDataset.augment_tensor(t, rng.integers(1, 6))[0],
                        rng.integers(1, 6))[0],
                    rng.integers(1, 6))[0], "Compose"

    @staticmethod
    def augment_image(
            img_np: np.ndarray,
            index: int
    ) -> tuple[torch.Tensor, str]:
        tensor = torch.from_numpy(img_np)
        return AImagesDataset.augment_tensor(tensor, index)

    def __init__(self, data_set: torch.utils.data.Dataset):
        super().__init__()
        self.data = data_set

    def __getitem__(self, index: int):
        # index // 7 selects the base sample, index % 7 selects the transform.
        image, class_id, class_name, image_filepath = self.data[index // 7]
        img, transform = AImagesDataset.augment_tensor(image, index)
        return img, transform, index, class_id, class_name, image_filepath

    def __len__(self):
        return self.data.__len__() * 7
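
A minimal usage sketch (not part of the commit; it assumes the ImagesDataset and training_data folder added later in this commit): wrapping a base dataset multiplies its length by seven, and consecutive indices map to the same base image under different transforms.

from dataset import ImagesDataset
from AImageDataset import AImagesDataset

base = ImagesDataset("training_data")
augmented = AImagesDataset(base)
assert len(augmented) == 7 * len(base)
img, transform_name, idx, class_id, class_name, path = augmented[8]
print(transform_name)  # base sample 8 // 7 == 1, transform 8 % 7 == 1 -> "GaussianBlur"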

AsyncProgress.py Normal file

@@ -0,0 +1,196 @@
import queue
import threading
import time
import tkinter as tk
import tkinter.ttk as ttk
from contextlib import contextmanager
from dataclasses import dataclass, field
from typing import Optional


class ClearableQueue(queue.Queue):
    def clear(self):
        with self.mutex:
            while self._qsize() != 0:
                _ = self._get()


@contextmanager
def acquire_timeout(lock, timeout):
    result = lock.acquire(timeout=timeout)
    try:
        yield result
    finally:
        if result:
            lock.release()
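
# Illustrative contract of acquire_timeout (a sketch, not part of the original
# file): the context manager yields whether the lock was obtained in time and
# releases it on exit only in that case.
#
#     lock = threading.Lock()
#     with acquire_timeout(lock, 0.1) as acquired:
#         if acquired:
#             ...  # lock held: safe to touch shared state
#         else:
#             ...  # timed out: skip this cycle rather than block the UI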


class AsyncProgress(ttk.Frame):
    @dataclass(repr=False)
    class Stats:
        total: int
        current: int = 0
        # default_factory: capture the start time per instance, not once at
        # class-definition time (which a plain `= time.time()` default would do).
        start_time: float = field(default_factory=time.time)
        last_elapsed_time: float = 0
        total_elapsed_time: float = 0

        def __repr__(self):
            ips = 1 / self.last_elapsed_time if self.last_elapsed_time != 0 else 0
            return (f"{'{v:{p}d}'.format(v=self.current, p=len(str(self.total)))}/{self.total} "
                    f"| {ips:.2f}it/s "
                    f"| elapsed: {time.time() - self.start_time:.2f} "
                    f"| remaining: {(self.total - self.current) * self.last_elapsed_time:.2f} ")

    class Range:
        def __init__(self, parent, iterable, update_interval):
            self.iterable = iterable
            self.parent = parent
            self.update_interval = update_interval
            self.parent.pbar['maximum'] = len(iterable)

        def __iter__(self):
            last_time = time.time()
            last_update_time = 0
            stats = AsyncProgress.Stats(len(self.iterable))
            self.parent._start()
            try:
                for obj in self.iterable:
                    yield obj
                    stats.current += 1
                    stats.last_elapsed_time = time.time() - last_time
                    last_time = time.time()
                    # Push stats to the UI at most once per update_interval ms.
                    if time.time() - last_update_time > self.update_interval / 1000:
                        self.parent.step(1, stats)
                        last_update_time = time.time()
                stats.total_elapsed_time = time.time() - stats.start_time
                self.parent._finish(stats)
            finally:
                self.close()

        def close(self):
            self.iterable = range(0)

    def __init__(self, parent, *,
                 width=450,
                 height=30,
                 update_interval=20,
                 range_update_interval=10,
                 label=None):
        super().__init__(parent, width=width, height=height)
        self.grid_propagate(False)
        self.__event_step_queue = ClearableQueue()
        self.__lock = threading.Lock()
        self.__tk_pbar_value = tk.IntVar()
        self.__tk_stats_str = tk.StringVar()
        self.__tk_stats_str.set("Not running")
        self.running = False
        self.update_interval = update_interval
        self.range_update_interval = range_update_interval
        self.label = ttk.Label(self, text=label if label is not None else "")
        self.pbar = ttk.Progressbar(self, variable=self.__tk_pbar_value)
        self.stats = ttk.Label(self, textvariable=self.__tk_stats_str)
        self.label.configure(font='TkFixedFont')
        self.stats.configure(font='TkFixedFont')
        self.label.grid(row=0, column=0, sticky=tk.NW)
        self.pbar.grid(row=0, column=1, sticky=tk.NW)
        self.stats.grid(row=0, column=2, sticky=tk.NW, padx=5)
        self.__schedule_update()

    def step(self, amount: int = 1, stat: Optional[Stats] = None):
        self.__event_step_queue.put((amount, stat))

    def reset(self):
        with self.__lock:
            self.running = False
            self.__event_step_queue.clear()
            self.__tk_pbar_value.set(0)
            self.__tk_stats_str.set("Not running")

    def range(self, start_stop, stop=None, step=1):
        with self.__lock:
            if self.running:
                raise RuntimeError('Progressbar is already running')
        if stop is None:
            stop = start_stop
            start_stop = 0
        return AsyncProgress.Range(self, range(start_stop, stop, step), self.range_update_interval)

    def _start(self):
        with self.__lock:
            self.running = True

    def _finish(self, stat):
        with self.__lock:
            self.running = False
            self.__event_step_queue.clear()
            self.__event_step_queue.put((0, stat))

    def __schedule_update(self):
        self.master.after(self.update_interval, self.__update_self)

    def __update_self(self):
        # Runs on the Tk main loop; drain pending steps only if the lock was obtained.
        with acquire_timeout(self.__lock, 0.1) as acquired:
            if acquired:
                while not self.__event_step_queue.empty():
                    (amount, stat) = self.__event_step_queue.get()
                    if stat is not None:
                        self.__tk_pbar_value.set(stat.current)
                        if self.running:
                            self.__tk_stats_str.set(str(stat))
                        else:
                            self.__tk_stats_str.set(
                                f'{stat.current}/{stat.total} | total: {stat.total_elapsed_time:.2f} seconds')
                    else:
                        self.__tk_pbar_value.set(self.__tk_pbar_value.get() + amount)
        self.__schedule_update()


if __name__ == '__main__':
    def worker(pbar, t):
        print('Starting worker')
        while True:
            for i in pbar.range(10):
                print(i)
                time.sleep(t)
            time.sleep(1)
            pbar.reset()
            time.sleep(1)
            print("Finished worker" + str(t))

    root = tk.Tk()
    bar1 = AsyncProgress(root, label="Progressbar 1", update_interval=20, range_update_interval=10)
    bar2 = AsyncProgress(root, label="Progressbar 2", update_interval=20, range_update_interval=10)
    bar3 = AsyncProgress(root, label=None, update_interval=20, range_update_interval=10)
    bar1.grid(row=0, column=0)
    bar2.grid(row=1, column=0)
    bar3.grid(row=2, column=0)
    # worker(bar)

    def start_worker():
        threading.Thread(target=worker, args=(bar1, 0.10), daemon=True).start()
        threading.Thread(target=worker, args=(bar2, 10.15), daemon=True).start()
        threading.Thread(target=worker, args=(bar3, 0.05), daemon=True).start()
        print("Worker threads started")

    root.after(0, start_worker)
    root.focus_set()
    root.mainloop()

architecture.py Normal file

@@ -0,0 +1,59 @@
import torch
import torch.nn


class MyCNN(torch.nn.Module):
    def __init__(self,
                 input_channels: int,
                 input_size: tuple[int, int],
                 hidden_channels: list[int],
                 output_channels: int,
                 use_batchnorm: bool,
                 kernel_size: list,
                 stride: list[int],
                 activation_function: torch.nn.Module = torch.nn.ReLU()):
        super().__init__()
        input_layer = torch.nn.Conv2d(in_channels=input_channels,
                                      out_channels=hidden_channels[0],
                                      kernel_size=kernel_size[0],
                                      padding='same' if (stride[0] == 1 or stride[0] == 0) else 'valid',
                                      stride=stride[0])
        hidden_layers = [torch.nn.Conv2d(hidden_channels[i - 1],
                                         hidden_channels[i],
                                         kernel_size[i],
                                         padding='same' if (stride[i] == 1 or stride[i] == 0) else 'valid',
                                         stride=stride[i])
                         for i in range(1, len(hidden_channels))]
        # With stride 1 and 'same' padding the spatial size is unchanged, so the
        # flattened feature map has hidden_channels[-1] * H * W values.
        self.output_layer = torch.nn.Linear(hidden_channels[-1] * input_size[0] * input_size[1], output_channels)

        def activation_function_repeater():
            while True:
                yield activation_function

        layers_except_output = [input_layer,
                                *hidden_layers]
        if use_batchnorm:
            batch_norm_layers = [torch.nn.BatchNorm2d(hidden_channels[i]) for i in range(0, len(hidden_channels))]
            # Pad with an unused placeholder layer; zip() stops at the shortest
            # iterable, so no conv layer can be dropped.
            batch_norm_layers = [*batch_norm_layers, torch.nn.BatchNorm2d(0)]
            layers_except_output = [layer
                                    for layer_tuple
                                    in zip(layers_except_output, batch_norm_layers, activation_function_repeater())
                                    for layer
                                    in layer_tuple]
        else:
            layers_except_output = [layer
                                    for layer_tuple in zip(layers_except_output, activation_function_repeater())
                                    for layer in layer_tuple]
        self.layers = torch.nn.Sequential(*layers_except_output)

    def forward(self, input_images: torch.Tensor) -> torch.Tensor:
        output = self.layers(input_images)
        # Flatten each sample's feature maps before the final linear layer.
        return self.output_layer(output.view(output.shape[0], -1))

# model = MyCNN()
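
A quick shape check (a sketch, not part of the commit) with the hyperparameters used in cnn_train.py: stride 1 with 'same' padding preserves the 100x100 spatial size, so the classifier input is hidden_channels[-1] * 100 * 100 features.

import torch
from architecture import MyCNN

net = MyCNN(input_channels=1, input_size=(100, 100),
            hidden_channels=[500, 250, 100, 50], output_channels=20,
            use_batchnorm=True, kernel_size=[9, 5, 3, 3, 1],
            stride=[1, 1, 1, 1, 1])
x = torch.rand(2, 1, 100, 100)  # arbitrary batch of two grayscale images
print(net(x).shape)             # torch.Size([2, 20])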

cnn_train.py Normal file

@@ -0,0 +1,119 @@
import tkinter as tk
import warnings
from datetime import datetime

import numpy.random
import torch.utils.data
import torch.cuda
from tqdm.tk import tqdm

from architecture import MyCNN
from dataset import ImagesDataset
from AImageDataset import AImagesDataset

model = MyCNN(input_channels=1,
              input_size=(100, 100),
              hidden_channels=[500, 250, 100, 50],
              output_channels=20,
              use_batchnorm=True,
              kernel_size=[9, 5, 3, 3, 1],
              stride=[1, 1, 1, 1, 1],
              activation_function=torch.nn.ReLU())
num_epochs = 100
batch_size = 64
optimizer = torch.optim.ASGD(model.parameters(),
                             lr=0.001,
                             lambd=1e-4,
                             alpha=0.75,
                             t0=1000000.0,
                             weight_decay=0)
loss_function = torch.nn.CrossEntropyLoss()

if __name__ == '__main__':
    torch.random.manual_seed(42)
    numpy.random.seed(42)
    start_time = datetime.now()
    dataset = ImagesDataset("training_data")
    # dataset = torch.utils.data.Subset(dataset, range(0, 20))
    train_data, eval_data = torch.utils.data.random_split(dataset, [0.5, 0.5])
    train_loader = torch.utils.data.DataLoader(AImagesDataset(train_data), batch_size=batch_size)
    eval_loader = torch.utils.data.DataLoader(eval_data, batch_size=1)

    # Fall back to the CPU instead of unconditionally calling .to('cuda') below.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    if device == 'cpu':
        warnings.warn("GPU not available")
    model = model.to(device)

    train_losses = []
    eval_losses = []

    # tqdm.tk renders its bars in a Tk window and needs an explicit root.
    root_window = tk.Tk()
    progress_epoch = tqdm(range(num_epochs), position=0, tk_parent=root_window)
    progress_epoch.set_description("Epoch")
    progress_train_data = tqdm(train_loader, position=1, tk_parent=root_window)
    progress_eval_data = tqdm(eval_loader, position=2, tk_parent=root_window)
    progress_train_data.set_description("Training progress")
    progress_eval_data.set_description("Evaluation progress")

    for epoch in progress_epoch:
        train_loss = 0
        eval_loss = 0
        progress_train_data.reset()
        progress_eval_data.reset()

        # Start training of model
        model.train()
        for batch_nr, (imageT, transforms, img_index, classIDs, labels, paths) in enumerate(progress_train_data):
            imageT = imageT.to(device)
            classIDs = classIDs.to(device)
            # progress_train_data.set_postfix_str("Running model...")
            outputs = model(imageT)
            optimizer.zero_grad()
            # progress_train_data.set_postfix_str("calculating loss...")
            loss = loss_function(outputs, classIDs)
            # progress_train_data.set_postfix_str("propagating loss...")
            loss.backward()
            # progress_train_data.set_postfix_str("optimizing...")
            optimizer.step()
            train_loss += loss.item()
        mean_loss = train_loss / len(train_loader)
        train_losses.append(mean_loss)

        # Evaluation of model: accumulate the loss so the mean over all
        # evaluation batches is recorded, not just the last batch.
        model.eval()
        with torch.no_grad():
            for (imageT, classIDs, labels, paths) in progress_eval_data:
                imageT = imageT.to(device)
                classIDs = classIDs.to(device)
                outputs = model(imageT)
                loss = loss_function(outputs, classIDs)
                eval_loss += loss.item()
        eval_losses.append(eval_loss / len(eval_loader))

        # print epoch summary
        # print(f"Epoch: {epoch} --- Train loss: {train_loss:7.4f} --- Eval loss: {eval_loss:7.4f}")
        # Note: the models/ directory must already exist.
        torch.save(model.state_dict(), f'models/model-{start_time.strftime("%Y%m%d-%H%M%S")}-epoch-{epoch}.pt')
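
Since only state_dicts are saved, restoring a checkpoint means rebuilding the architecture first. A sketch (the checkpoint filename is a placeholder):

import torch
from architecture import MyCNN

restored = MyCNN(input_channels=1, input_size=(100, 100),
                 hidden_channels=[500, 250, 100, 50], output_channels=20,
                 use_batchnorm=True, kernel_size=[9, 5, 3, 3, 1],
                 stride=[1, 1, 1, 1, 1])
restored.load_state_dict(torch.load('models/model-<timestamp>-epoch-0.pt', map_location='cpu'))
restored.eval()  # disable training-mode behavior (e.g. batchnorm updates) before inference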

dataset.py Normal file

@@ -0,0 +1,104 @@
from glob import glob
from os import path
import os
import math
from typing import Optional

import numpy as np
import torch
from PIL import Image
from torch.utils.data import Dataset


class ImagesDataset(Dataset):
    def __init__(
            self,
            image_dir,
            width: int = 100,
            height: int = 100,
            dtype: Optional[type] = None
    ):
        self.image_filepaths = sorted(path.abspath(f) for f in glob(path.join(image_dir, "*.jpg")))
        class_filepath = [path.abspath(f) for f in glob(path.join(image_dir, "*.csv"))][0]
        self.filenames_classnames, self.classnames_to_ids = ImagesDataset.load_classnames(class_filepath)
        if width < 100 or height < 100:
            raise ValueError('width and height must be greater than or equal to 100')
        self.width = width
        self.height = height
        self.dtype = dtype

    @staticmethod
    def load_classnames(class_filepath: str):
        filenames_classnames = np.genfromtxt(class_filepath, delimiter=';', skip_header=1, dtype=str)
        classnames = np.unique(filenames_classnames[:, 1])
        classnames.sort()
        classnames_to_ids = {}
        for index, classname in enumerate(classnames):
            classnames_to_ids[classname] = index
        return filenames_classnames, classnames_to_ids

    def __getitem__(self, index):
        with Image.open(self.image_filepaths[index]) as im:
            image = np.array(im, dtype=self.dtype)
        image = to_grayscale(image)
        resized_image, _ = prepare_image(image, self.width, self.height, 0, 0, 32)
        resized_image = torch.tensor(resized_image, dtype=torch.float32) / 255.0
        classname = self.filenames_classnames[index][1]
        classid = self.classnames_to_ids[classname]
        return resized_image, classid, classname, self.image_filepaths[index]

    def __len__(self):
        return len(self.image_filepaths)


def to_grayscale(pil_image: np.ndarray) -> np.ndarray:
    if pil_image.ndim == 2:
        return pil_image.copy()[None]
    if pil_image.ndim != 3:
        raise ValueError("image must have either shape (H, W) or (H, W, 3)")
    if pil_image.shape[2] != 3:
        raise ValueError(f"image has shape (H, W, {pil_image.shape[2]}), but it should have (H, W, 3)")
    # Undo the sRGB transfer function, take the luminance, then re-apply it.
    rgb = pil_image / 255
    rgb_linear = np.where(
        rgb < 0.04045,
        rgb / 12.92,
        ((rgb + 0.055) / 1.055) ** 2.4
    )
    grayscale_linear = 0.2126 * rgb_linear[..., 0] + 0.7152 * rgb_linear[..., 1] + 0.0722 * rgb_linear[..., 2]
    grayscale = np.where(
        grayscale_linear < 0.0031308,
        12.92 * grayscale_linear,
        1.055 * grayscale_linear ** (1 / 2.4) - 0.055
    )
    grayscale = grayscale * 255
    if np.issubdtype(pil_image.dtype, np.integer):
        grayscale = np.round(grayscale)
    return grayscale.astype(pil_image.dtype)[None]


def prepare_image(image: np.ndarray, width: int, height: int, x: int, y: int, size: int):
    if image.ndim < 3 or image.shape[-3] != 1:
        raise ValueError("image must have shape (1, H, W)")
    if width < 32 or height < 32 or size < 32:
        raise ValueError("width/height/size must be >= 32")
    if x < 0 or (x + size) > width:
        raise ValueError(f"x={x} and size={size} do not fit into the resized image width={width}")
    if y < 0 or (y + size) > height:
        raise ValueError(f"y={y} and size={size} do not fit into the resized image height={height}")
    image = image.copy()
    # Center-crop or edge-pad the height, then the width, to reach (1, height, width).
    if image.shape[1] > height:
        image = image[:, (image.shape[1] - height) // 2: (image.shape[1] - height) // 2 + height, :]
    else:
        image = np.pad(image,
                       ((0, 0),
                        ((height - image.shape[1]) // 2, math.ceil((height - image.shape[1]) / 2)),
                        (0, 0)),
                       mode='edge')
    if image.shape[2] > width:
        image = image[:, :, (image.shape[2] - width) // 2: (image.shape[2] - width) // 2 + width]
    else:
        image = np.pad(image,
                       ((0, 0),
                        (0, 0),
                        ((width - image.shape[2]) // 2, math.ceil((width - image.shape[2]) / 2))),
                       mode='edge')
    subarea = image[:, y:y + size, x:x + size]
    return image, subarea
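
A shape-only sketch of the two helpers on synthetic data (values are random, the sizes are illustrative): a 120x80 RGB image becomes a (1, 120, 80) grayscale array, is center-cropped/edge-padded to (1, 100, 100), and yields a (1, 32, 32) subarea.

import numpy as np
from dataset import to_grayscale, prepare_image

rgb = np.random.randint(0, 256, size=(120, 80, 3), dtype=np.uint8)
gray = to_grayscale(rgb)  # sRGB-correct luminance, shape (1, 120, 80)
padded, crop = prepare_image(gray, width=100, height=100, x=0, y=0, size=32)
print(gray.shape, padded.shape, crop.shape)  # (1, 120, 80) (1, 100, 100) (1, 32, 32)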

main.py Normal file

@@ -0,0 +1,23 @@
import tkinter as tk
import tkinter.ttk as ttk

from AsyncProgress import AsyncProgress

if __name__ == '__main__':
    root = tk.Tk()
    root.title("AI Project")
    progress_frame = ttk.Frame(root, padding=20, width=500)
    progress_frame.grid(row=0, column=0)
    pbar_epoch = AsyncProgress(progress_frame, label="     Epoch: ")
    pbar_train = AsyncProgress(progress_frame, label="     Train: ")
    pbar_eval = AsyncProgress(progress_frame, label="Evaluation: ")
    pbar_epoch.grid(row=0, column=0)
    pbar_train.grid(row=1, column=0)
    pbar_eval.grid(row=2, column=0)
    root.focus_set()
    root.mainloop()
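
main.py only lays out the three bars; nothing drives them yet. One way a background thread could feed them through AsyncProgress.range (a sketch only; the worker body is a stand-in, and the thread would have to be started before root.mainloop()):

import threading
import time

def train_worker():
    for epoch in pbar_epoch.range(10):
        for batch in pbar_train.range(100):
            time.sleep(0.01)   # stand-in for one training batch
        for sample in pbar_eval.range(50):
            time.sleep(0.005)  # stand-in for one evaluation sample

threading.Thread(target=train_worker, daemon=True).start()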

New binary files (content not shown):

training_data/0001439.jpg (8.7 KiB)
training_data/0001440.jpg (6.2 KiB)
training_data/0001469.jpg (1.2 KiB)
training_data/0001470.jpg (1.3 KiB)
training_data/0001537.jpg (3.9 KiB)
training_data/0001538.jpg (2.7 KiB)
training_data/0001539.jpg (4.1 KiB)
training_data/0001540.jpg (4.1 KiB)
training_data/0001541.jpg (4.2 KiB)
training_data/0001576.jpg (1.4 KiB)
training_data/0001577.jpg (1.9 KiB)
training_data/0001578.jpg (2.0 KiB)
training_data/0001579.jpg (1.4 KiB)
training_data/0001580.jpg (1.5 KiB)
training_data/0001581.jpg (1.5 KiB)
training_data/0001590.jpg (1.4 KiB)
training_data/0001644.jpg (6.1 KiB)
training_data/0001660.jpg (5.3 KiB)
training_data/0001661.jpg (5.6 KiB)
training_data/0001662.jpg (5.5 KiB)
training_data/0001663.jpg (5.3 KiB)
training_data/0001664.jpg (4.0 KiB)
training_data/0001680.jpg (2.2 KiB)
training_data/0001681.jpg (2.3 KiB)
training_data/0001682.jpg (2.5 KiB)
training_data/0001683.jpg (2.2 KiB)
training_data/0001684.jpg (2.6 KiB)
training_data/0001711.jpg (5.5 KiB)
training_data/0001712.jpg (5.2 KiB)
training_data/0001713.jpg (5.4 KiB)
training_data/0001714.jpg (5.0 KiB)
training_data/0001715.jpg (2.4 KiB)
training_data/0001716.jpg (2.6 KiB)
training_data/0001717.jpg (2.8 KiB)
training_data/0001718.jpg (2.7 KiB)
training_data/0001719.jpg (2.2 KiB)
training_data/0001737.jpg (5.1 KiB)
training_data/0001738.jpg (3.5 KiB)
training_data/0001739.jpg (3.4 KiB)
training_data/0001740.jpg (3.6 KiB)
training_data/0001773.jpg (4.0 KiB)
training_data/0001787.jpg (2.8 KiB)
training_data/0001788.jpg (3.3 KiB)
training_data/0001790.jpg (6.3 KiB)
training_data/0001791.jpg (4.4 KiB)
training_data/0001792.jpg (6.0 KiB)
training_data/0001793.jpg (4.6 KiB)
training_data/0001794.jpg (2.0 KiB)
training_data/0001795.jpg (5.1 KiB)
training_data/0001796.jpg (6.4 KiB)
training_data/0001797.jpg (5.4 KiB)
training_data/0001798.jpg (3.3 KiB)
training_data/0001813.jpg (2.5 KiB)
training_data/0001814.jpg (6.2 KiB)
training_data/0001824.jpg (1.3 KiB)
training_data/0001830.jpg (1.2 KiB)
training_data/0001831.jpg (1.2 KiB)
training_data/0001832.jpg (1.4 KiB)
training_data/0001833.jpg (1.2 KiB)
training_data/0001834.jpg (1.4 KiB)
training_data/0001835.jpg (2.5 KiB)
training_data/0001836.jpg (1.7 KiB)
training_data/0001837.jpg (2.4 KiB)
training_data/0001838.jpg (2.2 KiB)
training_data/0001839.jpg (2.0 KiB)
training_data/0001845.jpg (1.3 KiB)
training_data/0001846.jpg (1.5 KiB)
training_data/0001847.jpg (1.3 KiB)
training_data/0001848.jpg (1.3 KiB)
training_data/0001849.jpg (1.4 KiB)
training_data/0001860.jpg (2.0 KiB)
training_data/0001861.jpg (1.7 KiB)
training_data/0001862.jpg (1.2 KiB)
training_data/0001863.jpg (2.1 KiB)
training_data/0001864.jpg (1.5 KiB)
training_data/0001885.jpg (4.1 KiB)
training_data/0001886.jpg (5.2 KiB)
training_data/0001887.jpg (5.5 KiB)
training_data/0001888.jpg (4.6 KiB)
training_data/0001889.jpg (5.9 KiB)
training_data/0001895.jpg (3.3 KiB)
training_data/0001896.jpg (2.8 KiB)
training_data/0001897.jpg (3.2 KiB)
training_data/0001898.jpg (3.1 KiB)
training_data/0001899.jpg (3.1 KiB)
training_data/0001925.jpg (3.0 KiB)
training_data/0001926.jpg (3.6 KiB)
training_data/0001927.jpg (2.4 KiB)
training_data/0001928.jpg (2.5 KiB)
training_data/0001929.jpg (2.3 KiB)
training_data/0001933.jpg (3.0 KiB)
training_data/0001934.jpg (3.9 KiB)
training_data/0001940.jpg (3.2 KiB)
training_data/0001941.jpg (3.0 KiB)
Some files were not shown because too many files have changed in this diff.