More nits

commit da21074477
parent 88270e9307
@@ -30,6 +30,6 @@ peft
 wandb
 omegaconf
 s3fs
-transformers==4.51.3
+transformers>=4.45.1
 bitsandbytes
 ftfy
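As a quick aside (not part of this commit), when moving between the pinned and the ranged requirement it can help to confirm which transformers version actually landed in the environment; either of these generic checks works:

    pip show transformers | grep ^Version                              # prints "Version: x.y.z"
    python -c "import transformers; print(transformers.__version__)"   # same check from Python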
@@ -44,4 +44,4 @@ gantry run \
 --env-secret WANDB_API_KEY=JAKE_WANDB_API_KEY \
 --shared-memory 10GiB \
 --yes \
--- /bin/bash -c "source scripts/beaker/${CLUSTER}-ib.sh && python -m olmocr.train.loaddataset ${EXTRA_ARGS} && accelerate launch --use_fsdp --num_processes \${BEAKER_ASSIGNED_GPU_COUNT} --fsdp_offload_params false --fsdp_sharding_strategy FULL_SHARD --fsdp_auto_wrap_policy TRANSFORMER_BASED_WRAP --mixed_precision bf16 -m olmocr.train.train ${EXTRA_ARGS}"
+-- /bin/bash -c "pip install transformers==4.51.3 && source scripts/beaker/${CLUSTER}-ib.sh && python -m olmocr.train.loaddataset ${EXTRA_ARGS} && accelerate launch --use_fsdp --num_processes \${BEAKER_ASSIGNED_GPU_COUNT} --fsdp_offload_params false --fsdp_sharding_strategy FULL_SHARD --fsdp_auto_wrap_policy TRANSFORMER_BASED_WRAP --mixed_precision bf16 -m olmocr.train.train ${EXTRA_ARGS}"
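The changed line above is the payload gantry hands to the Beaker job. As a hedged sketch only, the same training launch could be reproduced outside gantry roughly as follows; the explicit GPU count of 8 is an illustrative stand-in for \${BEAKER_ASSIGNED_GPU_COUNT}, ${EXTRA_ARGS} is left as the caller's usual config flags, and the `source scripts/beaker/${CLUSTER}-ib.sh` step is omitted since it appears to set cluster-specific networking and only applies on Beaker:

    # Sketch: run the same FSDP training launch without gantry/Beaker.
    # Assumes the olmocr repo is installed and ${EXTRA_ARGS} holds the usual config flags.
    pip install transformers==4.51.3                   # same runtime pin as the new command
    python -m olmocr.train.loaddataset ${EXTRA_ARGS}   # pre-cache the dataset before launching
    accelerate launch \
        --use_fsdp \
        --num_processes 8 \
        --fsdp_offload_params false \
        --fsdp_sharding_strategy FULL_SHARD \
        --fsdp_auto_wrap_policy TRANSFORMER_BASED_WRAP \
        --mixed_precision bf16 \
        -m olmocr.train.train ${EXTRA_ARGS}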