
Commit ae203ff

feat(graphs): create paper graphics

1 parent bca6470 · commit ae203ff

File tree

9 files changed: +68 -4 lines changed

README.md

Lines changed: 12 additions & 1 deletion

@@ -22,7 +22,7 @@ the more effective the evaluation metric is in capturing the nuances of sign lan
 - [Tokenized BLEU](signwriting_evaluation/metrics/bleu.py) - BLEU score for tokenized SignWriting FSW strings.
 - [chrF](signwriting_evaluation/metrics/chrf.py) - chrF score for untokenized SignWriting FSW strings.
 - [CLIPScore](signwriting_evaluation/metrics/clipscore.py) - CLIPScore between SignWriting images. (Using the original CLIP model)
-- [Similarity](signwriting_evaluation/metrics/similarity.py) - symbol distance score for SignWriting FSW strings [(README)](signwriting_evaluation/metrics/similarity).
+- [Similarity](signwriting_evaluation/metrics/similarity.py) - symbol distance score for SignWriting FSW strings [(README)](signwriting_evaluation/metrics/similarity.md).
 
 ## Qualitative Evaluation
 
@@ -62,6 +62,17 @@ For each sign and metric, either the first match is incorrect, or there is a mor
 </tbody>
 </table>
 
+## Cite
+If you use our toolkit in your research or projects, please consider citing the work.
+
+```bib
+@misc{signwriting-evaluation2024,
+  title={SignWriting Evaluation: Metrics for Evaluating SignWriting Transcription and Translation Models},
+  author={Moryossef, Amit and Zilberman, Rotem and Langer, Ohad},
+  howpublished={\url{https://github.com/sign-language-processing/signwriting-evaluation}},
+  year={2024}
+}
+```
 
 ## References
 
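The metrics listed in the README operate either on raw FSW strings or on rendered SignWriting images. As a rough illustration of the character-level idea behind the chrF entry — not the toolkit's exact API, and with placeholder FSW strings — one could compute chrF over two untokenized FSW strings directly with sacrebleu:

```python
# Illustrative sketch only: sacrebleu's chrF applied to raw FSW strings.
# The toolkit's own chrF metric (signwriting_evaluation/metrics/chrf.py)
# may wrap this differently; the strings below are placeholders.
from sacrebleu.metrics import CHRF

hypothesis = "M518x529S14c20481x471S27106503x489"  # placeholder FSW string
reference = "M518x533S1870a489x515S18701482x490"   # placeholder FSW string

chrf = CHRF()  # default character n-gram settings
score = chrf.sentence_score(hypothesis, [reference])
print(score.score)  # higher means more character-level overlap
```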

assets/distribution/CHRF.png — 3.37 KB

assets/distribution/CLIPScore.png — 3.75 KB

(two additional distribution images) — 2.83 KB, 3.42 KB

assets/distribution/all.png — 5.1 KB

signwriting_evaluation/evaluation/closest_matches.py

Lines changed: 9 additions & 0 deletions

@@ -14,6 +14,13 @@
 ASSETS_DIR = CURRENT_DIR.parent.parent / "assets"
 
 
+# Set the font to Times Roman
+plt.rcParams['font.family'] = 'Times New Roman'
+plt.rcParams['font.serif'] = ['Times New Roman'] + plt.rcParams['font.serif']
+# increase font size
+plt.rcParams.update({'font.size': 14})
+
+
 def load_signs(signs_file: Path):
     with open(signs_file, 'r', encoding='utf-8') as signs_f:
         signs = signs_f.read().splitlines()
@@ -78,6 +85,8 @@ def metrics_distribution(signs: list[str], metrics: list[SignWritingMetric]):
     plt.legend(loc="upper right")
     plt.tight_layout()
     plt.savefig(distribution_dir / "all.png")
+    plt.savefig(distribution_dir / "all.pdf")
+
     plt.close()
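The additions above style Matplotlib globally for paper-ready figures — a Times New Roman serif font, a larger base font size, and a PDF export saved alongside the PNG. A minimal standalone sketch of the same pattern (the data and output paths here are hypothetical, not the repository's plotting code):

```python
# Minimal sketch of the global-styling pattern used above; the data and
# file names are hypothetical examples, not part of the repository.
from pathlib import Path

import matplotlib.pyplot as plt

# Global style: serif font and larger text, applied to every subsequent figure
plt.rcParams['font.family'] = 'Times New Roman'
plt.rcParams['font.serif'] = ['Times New Roman'] + plt.rcParams['font.serif']
plt.rcParams.update({'font.size': 14})

out_dir = Path("assets/distribution")
out_dir.mkdir(parents=True, exist_ok=True)

plt.hist([0.1, 0.4, 0.42, 0.7, 0.73, 0.9], bins=10, label="example metric")
plt.xlabel("score")
plt.ylabel("count")
plt.legend(loc="upper right")
plt.tight_layout()

# Save a raster copy for quick viewing and a vector copy for the paper
plt.savefig(out_dir / "example.png")
plt.savefig(out_dir / "example.pdf")
plt.close()
```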

Lines changed: 44 additions & 0 deletions (new file)

@@ -0,0 +1,44 @@
+# pylint: disable=line-too-long, duplicate-code
+from pathlib import Path
+
+if __name__ == "__main__":
+    matches_dir = Path(__file__).parent.parent.parent / "assets" / "matches"
+
+    # LaTeX header rows
+    signs_header = ["&"]
+    metrics_header = ["Rank"]
+
+    # LaTeX body rows
+    rows = [[str(i + 1)] for i in range(10)]  # 10 rows
+
+    for sign_dir in matches_dir.iterdir():
+        # pylint: disable=invalid-name
+        colspan = 0
+        for metric_dir in sign_dir.iterdir():
+            if metric_dir.is_dir():
+                colspan += 1
+                metrics_header.append(f"\\texttt{{{metric_dir.name}}}")
+                for i in range(10):
+                    rows[i].append(
+                        f"\\includegraphics[width=0.07\\textwidth]{{assets/matches/{sign_dir.name}/{metric_dir.name}/{i}.png}}"
+                    )
+        signs_header.append(
+            f"\\multicolumn{{{colspan}}}{{c|}}{{\\includegraphics[width=0.1\\textwidth]{{assets/matches/{sign_dir.name}/ref.png}}}}"
+        )
+
+    # Create LaTeX table
+    print("\\begin{table*}[ht]")
+    print(" \\centering")
+    print(" \\begin{tabular}{c|" + "c" * len(metrics_header) + "}")
+    print(" \\toprule")
+    print(f" {' & '.join(signs_header)} \\\\")
+    print(" \\cmidrule{2-" + str(len(metrics_header) + 1) + "}")
+    print(f" {' & '.join(metrics_header)} \\\\")
+    print(" \\midrule")
+    for row in rows:
+        print(f" {' & '.join(row)} \\\\")
+    print(" \\bottomrule")
+    print(" \\end{tabular}")
+    print(" \\caption{Top 10 nearest neighbors for selected signs using different evaluation metrics. The reference signs are shown at the top, and the retrieved signs are displayed in order of decreasing similarity score from left to right.}")
+    print(" \\label{tab:nearest_neighbors}")
+    print("\\end{table*}")

signwriting_evaluation/metrics/clip.py

Lines changed: 3 additions & 3 deletions

@@ -7,7 +7,6 @@
 from PIL import Image
 from signwriting.visualizer.visualize import signwriting_to_image
 from tqdm import tqdm
-from transformers import AutoModel, AutoProcessor
 
 from signwriting_evaluation.metrics.base import SignWritingMetric
 
@@ -51,9 +50,10 @@ def __init__(self,
                 device=None):
        super().__init__(name="CLIPScore")
 
-        # Init CLIP model
-        self.model = AutoModel.from_pretrained(model_id)
+        # Init CLIP model pylint: disable=import-outside-toplevel
+        from transformers import AutoModel, AutoProcessor
        self.processor = AutoProcessor.from_pretrained(model_id)
+        self.model = AutoModel.from_pretrained(model_id)
 
        # Init cache
        if cache_directory is None:
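This change defers the heavyweight `transformers` import from module load time to the metric's `__init__`, so importing the metrics package stays cheap when CLIPScore is never instantiated. A generic sketch of that lazy-import pattern (the class name and model id below are hypothetical, not the repository's actual file):

```python
# Generic lazy-import sketch; the class and model id are hypothetical,
# illustrating the pattern applied to clip.py above.
class LazyClipScorer:
    def __init__(self, model_id: str = "openai/clip-vit-base-patch32"):
        # Importing transformers only when the scorer is instantiated keeps
        # plain `import` of the surrounding package fast for users who never
        # touch this metric.
        # pylint: disable=import-outside-toplevel
        from transformers import AutoModel, AutoProcessor

        self.processor = AutoProcessor.from_pretrained(model_id)
        self.model = AutoModel.from_pretrained(model_id)
```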
