massively downsized data

This commit is contained in:
Falko Victor Habel 2025-02-23 20:09:28 +01:00
parent ad27ea2fa2
commit 7603ce8851
1 changed files with 1 additions and 1 deletions

View File

@ -41,7 +41,7 @@ class UpscaleDataset(Dataset):
combined_df = pd.DataFrame()
for parquet_file in parquet_files:
# Load data with head() to limit rows for memory efficiency.
df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(1250)
df = pd.read_parquet(parquet_file, columns=['image_512', 'image_1024']).head(500)
combined_df = pd.concat([combined_df, df], ignore_index=True)
# Validate that each row has proper image formats.