-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy patharxiv_search.rs
More file actions
124 lines (109 loc) · 3.68 KB
/
arxiv_search.rs
File metadata and controls
124 lines (109 loc) · 3.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
use zeroentropy_community::{Client, MetadataValue};
use std::collections::HashMap;
use std::time::Duration;
use tokio::time::sleep;
/// Simplified example of downloading and searching arXiv papers
///
/// Downloads "Attention Is All You Need" (arXiv 1706.03762), indexes it
/// into a ZeroEntropy collection with title/author metadata, runs one
/// semantic query against it, then deletes the temporary PDF.
///
/// Usage:
/// export ZEROENTROPY_API_KEY="your-api-key"
/// cargo run --example arxiv_search
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load .env file if it exists (ignored if absent: the API key may be
    // exported directly in the environment instead).
    dotenv::dotenv().ok();
    let client = Client::from_env()?;
    let collection = "arxiv_demo";

    // Create the collection. A Conflict error means it already exists,
    // which is fine for a re-runnable example; any other error is fatal.
    println!("\n=== Creating Collection ===");
    match client.collections().add(collection).await {
        Ok(_) => println!("Collection '{}' created", collection),
        Err(zeroentropy_community::Error::Conflict(_)) => {
            println!("Collection '{}' already exists", collection)
        }
        Err(e) => return Err(e.into()),
    }

    // Download a classic paper: "Attention Is All You Need"
    let arxiv_id = "1706.03762";
    let pdf_url = format!("https://arxiv.org/pdf/{}.pdf", arxiv_id);
    println!("\n=== Downloading Paper ===");
    println!("Paper: Attention Is All You Need ({})", arxiv_id);
    println!("Downloading from: {}", pdf_url);
    let http_client = reqwest::Client::new();
    // FIX: reqwest does not turn 4xx/5xx into errors by default, so without
    // `error_for_status()` a 404 HTML error page would be saved and indexed
    // as if it were the PDF. Fail fast on any non-success status instead.
    let pdf_bytes = http_client
        .get(&pdf_url)
        .send()
        .await?
        .error_for_status()?
        .bytes()
        .await?;

    // Save the PDF to the OS temp directory so the SDK can upload it by path.
    // NOTE(review): if any later step fails via `?`, this temp file is left
    // behind — acceptable for an example since it lives in the temp dir.
    let temp_dir = std::env::temp_dir();
    let pdf_path = temp_dir.join(format!("arxiv_{}.pdf", arxiv_id));
    std::fs::write(&pdf_path, pdf_bytes)?;
    println!("Downloaded to: {}", pdf_path.display());

    // Index the paper with descriptive metadata (returned alongside search
    // results when metadata inclusion is requested below).
    println!("\n=== Indexing Paper ===");
    let mut metadata = HashMap::new();
    metadata.insert(
        "title".to_string(),
        MetadataValue::String("Attention Is All You Need".to_string()),
    );
    metadata.insert(
        "authors".to_string(),
        MetadataValue::String("Vaswani et al.".to_string()),
    );
    metadata.insert(
        "arxiv_id".to_string(),
        MetadataValue::String(arxiv_id.to_string()),
    );
    metadata.insert(
        "published".to_string(),
        MetadataValue::String("2017-06-12".to_string()),
    );
    match client
        .documents()
        .add_pdf_file(
            collection,
            &format!("arxiv_{}.pdf", arxiv_id),
            &pdf_path,
            Some(metadata),
        )
        .await
    {
        Ok(_) => {
            println!("Paper indexed successfully");
            // Give the service a moment to finish parsing/embedding the PDF
            // before querying; otherwise the search may return nothing.
            println!("Waiting for document processing...");
            sleep(Duration::from_secs(3)).await;
        }
        Err(zeroentropy_community::Error::Conflict(_)) => {
            // Document path already exists in the collection from a prior run.
            println!("Paper already indexed, using existing version");
        }
        Err(e) => return Err(e.into()),
    }

    // Search the indexed paper with a natural-language question.
    println!("\n=== Searching Paper ===");
    let query = "How does multi-head attention work?";
    println!("Query: \"{}\"", query);
    let results = client
        .queries()
        .top_snippets(
            collection,
            query,
            5,          // top 5 results
            None,       // no filter
            Some(true), // include metadata
            Some(true), // precise responses
            None,       // default reranker
        )
        .await?;

    // Print each snippet with its relevance score, separated by rules.
    println!("\nFound {} results:\n", results.results.len());
    for (i, result) in results.results.iter().enumerate() {
        println!("{}. Score: {:.4}", i + 1, result.score);
        println!("   {}\n", result.content.trim());
        println!("{}", "─".repeat(80));
    }

    // Clean up the temporary PDF now that indexing is complete.
    std::fs::remove_file(&pdf_path)?;
    println!("\nCleaned up temporary PDF");
    Ok(())
}