gsaltintas committed
Commit dd2a5fd · verified · 1 Parent(s): 569526f

Uploading tokenizer_robustness_completion_farsi_colloquial subset

README.md CHANGED
@@ -20,6 +20,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_farsi_code_language_script_switching/test-*
+- config_name: tokenizer_robustness_completion_farsi_colloquial
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_farsi_colloquial/test-*
 dataset_info:
 - config_name: tokenizer_robustness_completion_farsi_arabic_keyboard_for_farsi
   features:
@@ -393,6 +397,130 @@ dataset_info:
     num_examples: 40
   download_size: 40872
   dataset_size: 24063
+- config_name: tokenizer_robustness_completion_farsi_colloquial
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 23404
+    num_examples: 40
+  download_size: 40120
+  dataset_size: 23404
 ---
 
 # Dataset Card for Tokenization Robustness
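Once this commit lands, the new config is loadable through the `datasets` library. A minimal sketch, assuming a placeholder repository id (the commit page does not name the repo):

```python
# Minimal sketch: load the subset added by this commit.
# REPO_ID is a placeholder -- substitute the actual dataset repository.
from datasets import load_dataset

REPO_ID = "gsaltintas/tokenizer-robustness"  # hypothetical; not shown on this page

ds = load_dataset(
    REPO_ID,
    name="tokenizer_robustness_completion_farsi_colloquial",  # config added above
    split="test",  # the only split declared for this config
)

print(len(ds))                # 40 examples, per num_examples in dataset_info
print(ds[0]["question"])      # multiple-choice completion prompt
print(ds[0]["token_counts"])  # per-tokenizer token counts for this example
```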
tokenizer_robustness_completion_farsi_colloquial/test-00000-of-00001.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dce26eac1c96677d33a87bfe3c4e17bb553b7104cd3cdb8dff68beb7515ef00a
+size 40120
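The parquet file is stored through Git LFS, so the blob committed above is only a pointer: `oid` is the sha256 of the real file and `size` its byte length. A small sketch of checking a downloaded copy against that pointer (the local path is an assumption):

```python
# Sketch: verify a downloaded parquet against the Git LFS pointer fields above.
import hashlib

EXPECTED_OID = "dce26eac1c96677d33a87bfe3c4e17bb553b7104cd3cdb8dff68beb7515ef00a"
EXPECTED_SIZE = 40120
PATH = "tokenizer_robustness_completion_farsi_colloquial/test-00000-of-00001.parquet"  # assumed local path

data = open(PATH, "rb").read()
assert len(data) == EXPECTED_SIZE, f"size mismatch: got {len(data)}"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("parquet matches the LFS pointer")
```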