From 79e0aeb269277ec956313f11183c966d35cb7743 Mon Sep 17 00:00:00 2001
From: Falko Habel
Date: Mon, 24 Feb 2025 16:39:25 +0100
Subject: [PATCH] Downsize batch size, increase images loaded per parquet file

---
 src/aiunn/finetune.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/aiunn/finetune.py b/src/aiunn/finetune.py
index 0013ec9..508a4e8 100644
--- a/src/aiunn/finetune.py
+++ b/src/aiunn/finetune.py
@@ -41,7 +41,7 @@ class UpscaleDataset(Dataset):
         combined_df = pd.DataFrame()
         for parquet_file in parquet_files:
             # Load a subset (head(2500)) from each parquet file
-            df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(1250)
+            df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(2500)
             combined_df = pd.concat([combined_df, df], ignore_index=True)
 
         # Validate rows (ensuring each value is bytes or str)
@@ -128,7 +128,7 @@ dataset = UpscaleDataset([
     "/root/training_data/vision-dataset/image_upscaler.parquet",
     "/root/training_data/vision-dataset/image_vec_upscaler.parquet"
 ], transform=transform)
-data_loader = DataLoader(dataset, batch_size=2, shuffle=True) # Consider adjusting num_workers if needed.
+data_loader = DataLoader(dataset, batch_size=1, shuffle=True) # Consider adjusting num_workers if needed.
 
 # Define loss function and optimizer.
 criterion = nn.MSELoss()
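
Reviewer note (not part of the patch): the two changes trade off against each other. Reading 2500 rows per parquet file instead of 1250 roughly doubles the host memory held by the combined DataFrame, while dropping the batch size from 2 to 1 roughly halves per-step GPU memory. With batch_size=1 each step also does less compute per item fetched, so the existing "Consider adjusting num_workers if needed" comment becomes more pressing. Below is a minimal sketch of a tuned loader, assuming the dataset object built in finetune.py; the num_workers and pin_memory values are illustrative assumptions, not part of this patch:

    from torch.utils.data import DataLoader

    # dataset is the UpscaleDataset instance constructed in finetune.py.
    data_loader = DataLoader(
        dataset,
        batch_size=1,      # matches this patch
        shuffle=True,
        num_workers=4,     # assumption: tune to the CPU cores available
        pin_memory=True,   # assumption: speeds up host-to-GPU transfers
    )

With batch_size=1, shuffling still randomizes sample order, and multiple workers let decoding of the next samples overlap with the current training step.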