justinkay committed · Commit c614723 · 1 Parent(s): ce6dd47

Siglip to 384

- hf_zeroshot.py +2 -2
- iwildcam_demo.pt +1 -1
- models.txt +1 -1
- process_iwildcam_data.py +1 -1
hf_zeroshot.py CHANGED
@@ -45,7 +45,7 @@ DESCRIPTIVE_CLASS_NAMES = [
 # Models to test
 MODELS = [
     "openai/clip-vit-large-patch14",
-    "google/siglip2-
+    "google/siglip2-large-patch16-384",
     "imageomics/bioclip",
     "imageomics/bioclip-2",
     "facebook/PE-Core-L14-336",
@@ -370,4 +370,4 @@ def main():
 if __name__ == "__main__":
     # Change to demo directory
     os.chdir(os.path.dirname(os.path.abspath(__file__)))
-    main()
+    main()
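For context, a minimal sketch (not taken from hf_zeroshot.py) of how the updated MODELS entry can be exercised for zero-shot image classification via the transformers pipeline, assuming a transformers version with SigLIP2 support; the image path and candidate labels below are placeholders, not the script's DESCRIPTIVE_CLASS_NAMES:

# Hedged sketch: zero-shot classification with the checkpoint id added in this commit.
from transformers import pipeline

classifier = pipeline(
    "zero-shot-image-classification",
    model="google/siglip2-large-patch16-384",
)
preds = classifier(
    "example.jpg",  # placeholder image path
    candidate_labels=["a photo of a deer", "a photo of a coyote"],
)
print(preds)  # list of {"label": ..., "score": ...} dicts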
iwildcam_demo.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bd0c2fb16ff3652c21ee75a3f854bef7868b7c438f2ddd73faf91fd35daea6d7
 size 127187
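iwildcam_demo.pt is tracked with Git LFS, so the change above only rewrites the pointer file (a new sha256 oid, same 127187-byte size) rather than showing a diff of the tensor data. A hypothetical check, not part of the commit, that a locally materialized copy matches the new pointer:

import hashlib

# Compare a downloaded iwildcam_demo.pt against the oid recorded in the LFS pointer.
with open("iwildcam_demo.pt", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == "bd0c2fb16ff3652c21ee75a3f854bef7868b7c438f2ddd73faf91fd35daea6d7"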
models.txt CHANGED
@@ -1,5 +1,5 @@
 facebook/PE-Core-L14-336
-google/siglip2-large-patch16-
+google/siglip2-large-patch16-384
 openai/clip-vit-large-patch14
 imageomics/bioclip-2
 laion/CLIP-ViT-L-14-laion2B-s32B-b82K
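models.txt is a plain list with one Hugging Face model id per line. How the rest of the repo consumes it is not visible in this diff; one assumed way to read it into Python is:

# Hypothetical reader for models.txt (the repo's actual loader isn't shown here).
with open("models.txt") as f:
    model_ids = [line.strip() for line in f if line.strip()]
# e.g. ['facebook/PE-Core-L14-336', 'google/siglip2-large-patch16-384', ...]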
process_iwildcam_data.py CHANGED
@@ -53,7 +53,7 @@ def create_tensors():
     # Load model predictions
     model_files = [
         'zeroshot_results_facebook_PE_Core_L14_336.json',
-        '
+        'zeroshot_results_google_siglip2_large_patch16_384.json',
         'zeroshot_results_openai_clip_vit_large_patch14.json',
         'zeroshot_results_imageomics_bioclip_2.json',
         'zeroshot_results_laion_CLIP_ViT_L_14_laion2B_s32B_b82K.json',
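A minimal sketch, assuming each zeroshot_results_*.json file is a self-contained JSON dump of per-model predictions, of how the renamed SigLIP2 results file would be loaded alongside the others; the loop is illustrative, not the repo's create_tensors() implementation:

import json

model_files = [
    'zeroshot_results_facebook_PE_Core_L14_336.json',
    'zeroshot_results_google_siglip2_large_patch16_384.json',  # filename updated in this commit
    'zeroshot_results_openai_clip_vit_large_patch14.json',
    'zeroshot_results_imageomics_bioclip_2.json',
    'zeroshot_results_laion_CLIP_ViT_L_14_laion2B_s32B_b82K.json',
]

results = {}
for path in model_files:
    with open(path) as f:
        results[path] = json.load(f)  # one entry of predictions per model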