develop #4
@@ -41,7 +41,7 @@ class UpscaleDataset(Dataset):
         combined_df = pd.DataFrame()
         for parquet_file in parquet_files:
             # Load a subset (head(2500)) from each parquet file
-            df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(1250)
+            df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(2500)
             combined_df = pd.concat([combined_df, df], ignore_index=True)

         # Validate rows (ensuring each value is bytes or str)
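For context, a minimal sketch of what the row validation referenced in the comment above might look like; the helper name and the exact checks are assumptions for illustration, not code from this PR:

# Hypothetical sketch of the validation step (not from this PR):
# keep only rows where both image columns hold bytes or str payloads.
def _is_valid(value):
    return isinstance(value, (bytes, str))

mask = combined_df['image_512'].map(_is_valid) & combined_df['image_1024'].map(_is_valid)
combined_df = combined_df[mask].reset_index(drop=True)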
@@ -128,7 +128,7 @@ dataset = UpscaleDataset([
     "/root/training_data/vision-dataset/image_upscaler.parquet",
     "/root/training_data/vision-dataset/image_vec_upscaler.parquet"
 ], transform=transform)
-data_loader = DataLoader(dataset, batch_size=2, shuffle=True)  # Consider adjusting num_workers if needed.
+data_loader = DataLoader(dataset, batch_size=1, shuffle=True)  # Consider adjusting num_workers if needed.

 # Define loss function and optimizer.
 criterion = nn.MSELoss()
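The inline comment suggests tuning num_workers; a minimal sketch of what that could look like with PyTorch's DataLoader, assuming the worker count and pin_memory setting (both illustrative values, not part of this change):

# Illustrative only: parallel data loading with pinned host memory.
# num_workers=4 and pin_memory=True are assumed values, not from this PR.
data_loader = DataLoader(
    dataset,
    batch_size=1,
    shuffle=True,
    num_workers=4,
    pin_memory=True,
)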