Paul Gessinger committed
Commit f9c1358 · Parent(s): 746ff15

refine script

scripts/manage_dataset_files.py (+119 -49) CHANGED
@@ -14,6 +14,9 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
+import logging
+import shutil
+import tempfile
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Annotated, Any, List, Sequence
@@ -23,6 +26,7 @@ import aiohttp
 import typer
 import yaml
 from rich.console import Console
+from rich.logging import RichHandler
 from rich.progress import (
     BarColumn,
     DownloadColumn,
@@ -37,6 +41,15 @@ from rich.table import Table
 console = Console()
 app = typer.Typer(help="Manage dataset file URLs declared in README front matter.")
 
+# Set up logging with rich handler
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(message)s",
+    datefmt="[%X]",
+    handlers=[RichHandler(console=console, show_path=False)],
+)
+logger = logging.getLogger(__name__)
+
 
 @dataclass
 class DataFileEntry:
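A note on the hunk above: RichHandler renders the timestamp and level itself, which is why the `logging` format string is reduced to just `%(message)s` and `datefmt` uses Rich's bracketed `[%X]` time style. For comparison, a minimal sketch of roughly equivalent output with the stock handler (illustrative only, not part of the commit):

```python
# Illustrative comparison: without Rich, the standard StreamHandler needs
# the verbose format string to show time and level per record.
import logging

logging.basicConfig(
    level=logging.INFO,
    format="[%(asctime)s] %(levelname)s %(message)s",
    datefmt="%X",
)
logging.getLogger(__name__).info("dataset download started")
```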
@@ -156,6 +169,7 @@ async def download_one(
     skip_existing: bool,
     progress: Progress,
     order: int,
+    staging_dir: Path | None = None,
 ) -> DownloadResult:
     _, remote_path = entry.parsed()
     filename = Path(remote_path).name or remote_path
@@ -164,37 +178,72 @@ async def download_one(
     async with semaphore:
         task_id: int | None = None
         destination = resolve_destination(entry, output_dir)
+
+        # Determine download location (staging or direct)
+        if staging_dir:
+            download_dest = resolve_destination(entry, staging_dir)
+            tmp_path = download_dest.parent / f"{download_dest.name}.part"
+        else:
+            download_dest = destination
+            tmp_path = destination.parent / f"{destination.name}.part"
+
         try:
             destination.parent.mkdir(parents=True, exist_ok=True)
+            download_dest.parent.mkdir(parents=True, exist_ok=True)
             task_id = progress.add_task(description, total=0, start=False)
             progress.start_task(task_id)
             async with session.get(entry.url) as response:
                 response.raise_for_status()
                 total_bytes = response.content_length or 0
+
+                # Check if we can skip this file
+                if skip_existing and destination.exists():
+                    local_size = destination.stat().st_size
+                    if total_bytes and local_size == total_bytes:
+                        if task_id is not None:
+                            progress.remove_task(task_id)
+                            task_id = None
+                        logger.info(
+                            f"Skipped {destination.name} ({total_bytes:,} bytes) - already exists with correct size"
+                        )
+                        return DownloadResult(
+                            entry=entry,
+                            path=destination,
+                            success=True,
+                            skipped=True,
+                            order=order,
+                        )
+                    else:
+                        # File exists but needs re-download
+                        if not total_bytes:
+                            reason = f"remote size unknown, local size is {local_size:,} bytes"
+                        else:
+                            reason = f"size mismatch (local: {local_size:,} bytes, remote: {total_bytes:,} bytes)"
+                        logger.info(f"Downloading {destination.name} - {reason}")
+                else:
+                    # File doesn't exist or skip_existing is disabled
+                    if not destination.exists():
+                        size_info = f" ({total_bytes:,} bytes)" if total_bytes else ""
+                        logger.info(f"Downloading {destination.name}{size_info} - file not found locally")
+                    else:
+                        size_info = f" ({total_bytes:,} bytes)" if total_bytes else ""
+                        logger.info(f"Downloading {destination.name}{size_info} - skip_existing is disabled")
+
                 if total_bytes:
                     progress.update(task_id, total=total_bytes)
                 with tmp_path.open("wb") as handle:
                     async for chunk in response.content.iter_chunked(1 << 17):
                         handle.write(chunk)
                         progress.update(task_id, advance=len(chunk))
+
+                # Move from .part to final location
+                tmp_path.rename(download_dest)
+
+                # If using staging, move from staging to final destination asynchronously
+                if staging_dir:
+                    logger.info(f"Moving {download_dest.name} from staging to {destination.parent.name}/")
+                    await asyncio.to_thread(shutil.move, str(download_dest), str(destination))
+
         return DownloadResult(
             entry=entry,
             path=destination,
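Two details of this hunk are worth calling out. The skip check compares only sizes (`Content-Length` against `st_size`), a heuristic that will not catch same-size content changes. And the move is two-step because `Path.rename` is atomic only within a single filesystem: a staging directory created under the system temp location may sit on a different mount, where a plain rename fails with `EXDEV`, so the commit falls back to `shutil.move` pushed off the event loop. A minimal sketch of that pattern (the function name and arguments are illustrative, mirroring the roles in the diff):

```python
# Sketch of the finalize step, assuming tmp_path, download_dest and
# destination play the same roles as in the hunk above.
import asyncio
import shutil
from pathlib import Path


async def finalize(tmp_path: Path, download_dest: Path, destination: Path) -> None:
    # Same-directory rename: atomic and cheap, safe to do on the event loop.
    tmp_path.rename(download_dest)
    if download_dest != destination:
        # shutil.move copies-then-deletes when the target is on another
        # filesystem; it is blocking I/O, so run it in a worker thread.
        await asyncio.to_thread(shutil.move, str(download_dest), str(destination))
```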
@@ -224,45 +273,57 @@ async def perform_downloads(
     max_concurrency: int,
     timeout: float,
     skip_existing: bool,
+    use_staging: bool = False,
 ) -> List[DownloadResult]:
     if not entries:
         return []
 
+    with contextlib.ExitStack() as stack:
+        # Create staging directory if needed
+        staging_dir: Path | None = None
+        if use_staging:
+            staging_tmp = stack.enter_context(
+                tempfile.TemporaryDirectory(prefix="dataset_staging_")
+            )
+            staging_dir = Path(staging_tmp)
+            logger.info(f"Using staging directory: {staging_dir}")
+
+        semaphore = asyncio.Semaphore(max_concurrency)
+        results: List[DownloadResult] = []
+        timeout_cfg = aiohttp.ClientTimeout(total=timeout)
+        progress = Progress(
+            TextColumn("{task.description}"),
+            BarColumn(bar_width=None),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TimeElapsedColumn(),
+            console=console,
+        )
 
+        async with aiohttp.ClientSession(timeout=timeout_cfg) as session:
+            with progress:
+                tasks: list[asyncio.Task[DownloadResult]] = []
+                for order, entry in enumerate(entries):
+                    task = asyncio.create_task(
+                        download_one(
+                            entry=entry,
+                            output_dir=output_dir,
+                            session=session,
+                            semaphore=semaphore,
+                            skip_existing=skip_existing,
+                            progress=progress,
+                            order=order,
+                            staging_dir=staging_dir,
+                        )
                     )
+                    tasks.append(task)
 
+                for future in asyncio.as_completed(tasks):
+                    result = await future
+                    results.append(result)
 
+        results.sort(key=lambda item: item.order)
+        return results
 
 
 async def verify_one(
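The concurrency shape here is a standard semaphore fan-out: every entry gets a task immediately, the semaphore inside `download_one` caps how many run at once, and since `asyncio.as_completed` yields results in completion order, the `order` field restores the input ordering afterwards. A self-contained sketch of the same shape (all names here are illustrative):

```python
# Bounded fan-out that preserves submission order, as in perform_downloads.
import asyncio


async def fan_out(jobs, limit: int):
    # `jobs` is a sequence of zero-argument coroutine functions.
    semaphore = asyncio.Semaphore(limit)

    async def run(order: int, job):
        async with semaphore:  # at most `limit` jobs in flight
            return order, await job()

    tasks = [asyncio.create_task(run(i, job)) for i, job in enumerate(jobs)]
    ordered = [await future for future in asyncio.as_completed(tasks)]
    ordered.sort(key=lambda pair: pair[0])  # restore submission order
    return [result for _, result in ordered]


async def main() -> None:
    async def work(n: int) -> int:
        await asyncio.sleep(0.01 * (5 - n))  # later jobs finish first
        return n * n

    print(await fan_out([lambda n=n: work(n) for n in range(5)], limit=2))


asyncio.run(main())
```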
@@ -436,6 +497,14 @@ def download(
             help="Skip downloading files that already exist locally.",
         ),
     ] = True,
+    use_staging: Annotated[
+        bool,
+        typer.Option(
+            "--stage/--no-stage",
+            show_default=True,
+            help="Download to a temporary staging directory first, then move to final destination.",
+        ),
+    ] = False,
 ) -> None:
     front_matter_text, body_text = read_front_matter(readme_path)
     entries = load_data_file_entries(front_matter_text)
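The `"--stage/--no-stage"` string gives Typer an explicit on/off flag pair for the boolean option, defaulting to off. A standalone sketch of the same pattern (the command name and help text are made up for illustration):

```python
# Minimal Typer app demonstrating a paired boolean flag, as in the diff.
from typing import Annotated

import typer

app = typer.Typer()


@app.command()
def fetch(
    use_staging: Annotated[
        bool,
        typer.Option("--stage/--no-stage", help="Toggle the staging directory."),
    ] = False,
) -> None:
    typer.echo(f"staging enabled: {use_staging}")


if __name__ == "__main__":
    app()
```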
@@ -498,6 +567,7 @@ def download(
             max_concurrency=max_concurrency,
             timeout=timeout,
             skip_existing=skip_existing,
+            use_staging=use_staging,
         )
     )
 
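Putting the pieces together, the new flag threads from the CLI down to `perform_downloads`. A hypothetical direct call, with keyword arguments as they appear in the hunks above (`entries` and the paths are placeholders):

```python
# Hypothetical driver; `entries` would come from the README front matter
# via load_data_file_entries, as in the download command above.
import asyncio
from pathlib import Path

results = asyncio.run(
    perform_downloads(
        entries=entries,          # placeholder: parsed DataFileEntry list
        output_dir=Path("data"),  # placeholder output directory
        max_concurrency=4,
        timeout=3600.0,
        skip_existing=True,
        use_staging=True,         # new in this commit
    )
)
```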