Skip to content

Commit 01b0be3

Browse files
mlgodskz, romandukhanin, inha1e
authored
RRP-318: Add worker authentication (#11)
* gitignore localtemp * PoolTest Conf * ShareLog init * External Hello Init * shares_logger init * log_share Share hash * static LOGGER_SENDER Bounded channel * primary_rx backup_rx * ShareLogProto * pub struct ShareLog tested * ShareLogProto removed * imports remove * ClickHouse init tested * insert_batch clickhouse 0.13.1 * share_processor.rs Tested * with share_diff * TEMP remove clickhouse channel * 169-3-1 DebugLog Proto * 169-3-1 Debugger-Logger-v.1 * services::debug_log commented * MATERIALIZED VIEW mv_hash_rate_stats * shares_per_minute to 10 * Backtrace::force_capture * 169-3-1 ExtendedChannelKind * rm -rf hello :) * 169-3-1 new config.rs * 169-3-1 comments to English * 169-3-1 README * 169-3-1 README * 169-3-1 rm handshake ) * 169-5 ShareLog to models * ClickhouseShare to models * 169-5 share to share_log * TEMP 169-5 New Config Mod (ENV in progress) * 169-5 TEMP ENV * 169-5 ENV to settings * 169-5 README * 169-5 Syntax Fix * 169-5 Spaces Fix * 169-5 ClickhouseError to mod * 169-5 is_valid to ShareStatus * 169-5 ClickHouse to trait * 169-5 Deps Removed * 169-5 ShareLogger refactoring * test_highload_share_processing * 169-5 test_highload_share_processing improve * 169-5 README Refreshed * 169-6 ShareData * 169-6 test_share_types * 169-6 TEMP BlockFound * 169-6 shares_logger::log_block(block) * 169-6 store_share on BlockFound * 169-7 Queries To Files * 169-7 Small Fix * 169-7 README * 169-7 Cargo Fix * 169-7 share.is_block_found fn * 169-7 Small Fixes * min_individual_miner_hashrate * RRP-318 nomium_test_worker from Translator to Pool hardcoded * RRP-318 user_identity from worker * RRP-318 user_identity to prepare_share_log * RRP-318 user_identity to ClickHouse 1 * RRP-318 user_identity DB optimized * RRP-318 user_identity on BlockFound * 169-11 handle_authorize with reqwest proto * 169-11 worker_name_store MOD * 169-11 worker_name_store to shares-logger * 169-11 shares with worker_id * 169-11 worker_id on blocks * 169-11 Tables n MV updated * 169-11 
mv_hash_rate_stats Readme-Ru * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * Update rrp-translator.Dockerfile * 169-11 wo OpenSSL * Update rrp-translator.Dockerfile * Update docker-compose.yml * Update docker-compose.yml * 169-11 new mv_hash_rate_stats * 169-11 Remove Obsolete Tests and fn to_storage_format * 169-11 README-RU.md mod * 169-11 share_data Trait rm * 169-11 ENV REDROCK_API_URL REDROCK_API_KEY * 169-11 New MV n HR Algo * 169-11 small * 169-11 Timeout to ENV etc --------- Co-authored-by: Roman Dukhanin <romandukhanin@yandex.ru> Co-authored-by: inha1e <78920981+inha1e@users.noreply.github.com>
1 parent 66cab73 commit 01b0be3

File tree

31 files changed

+653
-589
lines changed

31 files changed

+653
-589
lines changed

cicd/docker-compose.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,9 @@ services:
77
- CLICKHOUSE_USER=${CLICKHOUSE_USER}
88
- CLICKHOUSE_PASSWORD=${CLICKHOUSE_PASSWORD}
99
restart: always
10+
privileged: true
11+
volumes:
12+
- /var/log/clickhouse-server:/var/log/clickhouse-server
1013
ports:
1114
- 8123:8123
1215
network_mode: host

cicd/rrp-translator.Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
FROM rust:1.82 AS build
22

3-
RUN apt-get update && apt-get install -y musl-tools
3+
RUN apt-get update
4+
RUN apt-get install -y musl-tools
45

56
WORKDIR /stratum
67
COPY ./ .

nomium/shares-logger/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ serde_json = "1.0"
1616
primitive-types = "0.12"
1717
lazy_static = "1.4"
1818
config = "0.13"
19+
parking_lot = "0.12"
1920

2021
[dev-dependencies]
2122
tokio = { version = "1.0", features = ["rt", "macros", "test-util", "time"] }

nomium/shares-logger/README-RU.md

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
# Инструкция, как из консоли Linux пересоздать MV и заполнить его данными из источника
2+
3+
1. Добавить в MV ключевое слово: POPULATE (уже добавил в nomium/shares-logger/src/storage/clickhouse/queries/hashrate_view.sql)
4+
5+
2. Данные доступа и пути к файлам поменять на свои:
6+
7+
Проверка существования представления:
8+
```bash
9+
echo "SHOW TABLES FROM mining LIKE 'mv_hash_rate%'" | curl 'http://localhost:8123/?database=mining' --data-binary @- -u default:5555
10+
```
11+
12+
Удаление существующего представления:
13+
```bash
14+
echo "DROP TABLE IF EXISTS mining.mv_hash_rate_stats" | curl 'http://localhost:8123/?database=mining' --data-binary @- -u default:5555
15+
```
16+
17+
Создание нового представления из файла:
18+
```bash
19+
curl 'http://localhost:8123/?user=default&password=5555&database=mining' --data-binary @/home/ro/projects/nomium/dev/stratum/nomium/shares-logger/src/storage/clickhouse/queries/hashrate_view.sql
20+
```
21+
22+
Проверка что представление создалось:
23+
```bash
24+
echo "SELECT name, engine FROM system.tables WHERE database = 'mining' AND name = 'mv_hash_rate_stats'" | curl 'http://localhost:8123/?database=mining' --data-binary @- -u default:5555
25+
```
26+
27+
3. Базовые запросы к MV:
28+
29+
// в связи с работами над уточнением хэшрейта запросы и структура MV сильно изменятся, ожидаем.

nomium/shares-logger/src/lib.rs

Lines changed: 11 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@ pub mod models;
44
pub mod services;
55
pub mod storage;
66
pub mod traits;
7+
pub mod worker_name_store;
78

89
use crate::config::SETTINGS;
9-
use crate::traits::ShareData;
1010
use log::info;
1111
use std::sync::Arc;
1212
use tokio::sync::{mpsc::{self, error::TrySendError}, Mutex};
@@ -18,6 +18,8 @@ use crate::traits::ShareStorage;
1818
use crate::storage::clickhouse::ClickhouseBlockStorage;
1919
use crate::models::BlockFound;
2020
use std::time::Instant;
21+
use serde::Serialize;
22+
use serde::de::DeserializeOwned;
2123

2224
lazy_static! {
2325
static ref GLOBAL_LOGGER: ShareLogger<ShareLog> = {
@@ -45,18 +47,18 @@ pub fn log_block(block: BlockFound) {
4547
BLOCK_LOGGER.log_share(block);
4648
}
4749

48-
pub struct ShareLogger<T: ShareData> {
50+
pub struct ShareLogger<T: Send + Sync + Clone + Serialize + DeserializeOwned> {
4951
primary_tx: mpsc::Sender<T>,
5052
backup_tx: mpsc::UnboundedSender<T>,
5153
}
5254

53-
pub struct ShareLoggerBuilder<T: ShareData> {
55+
pub struct ShareLoggerBuilder<T: Send + Sync + Clone + Serialize + DeserializeOwned> {
5456
storage: Arc<Mutex<Box<dyn ShareStorage<T>>>>,
5557
primary_channel_size: Option<usize>,
5658
backup_check_interval: Option<Duration>,
5759
}
5860

59-
impl<T: ShareData + 'static> ShareLoggerBuilder<T> {
61+
impl<T: Send + Sync + Clone + Serialize + DeserializeOwned + 'static> ShareLoggerBuilder<T> {
6062

6163
pub fn new(storage: Box<dyn ShareStorage<T>>) -> Self {
6264
Self {
@@ -103,7 +105,7 @@ impl<T: ShareData + 'static> ShareLoggerBuilder<T> {
103105
}
104106
}
105107

106-
impl<T: ShareData + 'static> ShareLogger<T> {
108+
impl<T: Send + Sync + Clone + Serialize + DeserializeOwned + 'static> ShareLogger<T> {
107109
pub fn log_share(&self, share: T) {
108110
match self.primary_tx.try_send(share.clone()) {
109111
Ok(_) => (),
@@ -116,7 +118,7 @@ impl<T: ShareData + 'static> ShareLogger<T> {
116118
}
117119
}
118120

119-
async fn process_shares<T: ShareData>(
121+
async fn process_shares<T: Send + Sync + Clone + Serialize + DeserializeOwned>(
120122
mut primary_rx: mpsc::Receiver<T>,
121123
mut backup_rx: mpsc::UnboundedReceiver<T>,
122124
storage: Arc<Mutex<Box<dyn ShareStorage<T>>>>,
@@ -129,36 +131,20 @@ async fn process_shares<T: ShareData>(
129131
}
130132
let init_duration = init_start.elapsed();
131133
info!("Storage initialized in: {:?}", init_duration);
132-
133134
let mut backup_interval = tokio::time::interval(backup_check_interval);
134-
135135
loop {
136136
tokio::select! {
137137
Some(share) = primary_rx.recv() => {
138138
info!("Processing share from primary channel");
139-
140-
if share.is_block_found() {
141-
if let Err(e) = storage.lock().await.store_share(share).await {
142-
info!("Failed to store block: {}", e);
143-
}
144-
} else {
145-
if let Err(e) = storage.lock().await.store_share(share).await {
146-
info!("Failed to store share: {}", e);
147-
}
139+
if let Err(e) = storage.lock().await.store_share(share).await {
140+
info!("Failed to store share: {}", e);
148141
}
149142
}
150143
_ = backup_interval.tick() => {
151144
let mut backup_shares = Vec::new();
152145
while let Ok(share) = backup_rx.try_recv() {
153-
if share.is_block_found() {
154-
if let Err(e) = storage.lock().await.store_share(share).await {
155-
info!("Failed to store backup block: {}", e);
156-
}
157-
} else {
158-
backup_shares.push(share);
159-
}
146+
backup_shares.push(share);
160147
}
161-
162148
if !backup_shares.is_empty() {
163149
if let Err(e) = storage.lock().await.store_batch(backup_shares).await {
164150
info!("Failed to store backup shares: {}", e);
Lines changed: 37 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,50 @@
11
use serde::{Serialize, Deserialize};
2-
use crate::traits::ShareData;
3-
use async_trait::async_trait;
2+
use serde_json::Value;
3+
use serde_json::json;
4+
use log::info;
45

56
#[derive(Debug, Clone, Serialize, Deserialize)]
67
pub struct BlockFound {
78
pub channel_id: u32,
89
pub block_hash: Vec<u8>,
910
pub ntime: u32,
11+
pub user_identity: String,
12+
pub worker_id: String,
1013
}
1114

12-
#[async_trait]
13-
impl ShareData for BlockFound {
14-
fn get_identifier(&self) -> String {
15-
format!("{}_{}_{}", self.channel_id, hex::encode(&self.block_hash), self.ntime)
16-
}
15+
impl BlockFound {
16+
pub fn prepare_block(
17+
channel_id: u32,
18+
block_hash: Vec<u8>,
19+
ntime: u32,
20+
user_identity_json: String,
21+
) -> Self {
22+
info!("Preparing block with user_identity_json: {}", user_identity_json);
23+
24+
let worker_identity: Value = serde_json::from_str(&user_identity_json)
25+
.unwrap_or_else(|_| json!({
26+
"worker_name": user_identity_json.clone(),
27+
"worker_id": "unknown"
28+
}));
1729

18-
async fn validate(&self) -> bool {
19-
self.block_hash.len() == 32
20-
}
30+
let user_identity = worker_identity["worker_name"]
31+
.as_str()
32+
.unwrap_or(&user_identity_json)
33+
.to_string();
2134

22-
fn to_storage_format(&self) -> Vec<(String, String)> {
23-
vec![
24-
("channel_id".to_string(), self.channel_id.to_string()),
25-
("block_hash".to_string(), hex::encode(&self.block_hash)),
26-
("ntime".to_string(), self.ntime.to_string()),
27-
]
28-
}
35+
let worker_id = worker_identity["worker_id"]
36+
.as_str()
37+
.unwrap_or("unknown")
38+
.to_string();
2939

30-
fn is_block_found(&self) -> bool { true }
40+
info!("Block prepared with worker_id: {}", worker_id);
41+
42+
BlockFound {
43+
channel_id,
44+
block_hash,
45+
ntime,
46+
user_identity,
47+
worker_id,
48+
}
49+
}
3150
}

nomium/shares-logger/src/models/clickhouse_block.rs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,13 @@ use crate::models::BlockFound;
22
use clickhouse::Row;
33
use serde::Serialize;
44

5-
#[derive(Row, Serialize)]
5+
#[derive(Row, Serialize, Debug)]
66
pub struct ClickhouseBlock {
77
pub channel_id: u32,
88
pub block_hash: String,
99
pub ntime: u32,
10+
pub user_identity: String,
11+
pub worker_id: String,
1012
}
1113

1214
impl From<BlockFound> for ClickhouseBlock {
@@ -15,6 +17,8 @@ impl From<BlockFound> for ClickhouseBlock {
1517
channel_id: block.channel_id,
1618
block_hash: hex::encode(&block.block_hash),
1719
ntime: block.ntime,
20+
user_identity: block.user_identity,
21+
worker_id: block.worker_id,
1822
}
1923
}
2024
}

nomium/shares-logger/src/models/clickhouse_share.rs

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ pub struct ClickhouseShare {
1414
pub share_status: u8,
1515
pub extranonce: String,
1616
pub difficulty: f64,
17+
pub user_identity: String,
18+
pub worker_id: String,
1719
}
1820

1921
impl From<ShareLog> for ClickhouseShare {
@@ -42,6 +44,8 @@ impl From<ShareLog> for ClickhouseShare {
4244
share_status: share.share_status as u8,
4345
extranonce: extranonce_hex,
4446
difficulty: share.difficulty,
47+
user_identity: share.user_identity,
48+
worker_id: share.worker_id,
4549
}
4650
}
4751
}
Lines changed: 7 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
11
use serde::{Serialize, Deserialize};
2-
use crate::traits::ShareData;
3-
use async_trait::async_trait;
42

53
#[derive(Debug, Clone, Serialize, Deserialize)]
64
pub struct ShareLog {
@@ -14,6 +12,8 @@ pub struct ShareLog {
1412
pub share_status: ShareStatus,
1513
pub extranonce: Vec<u8>,
1614
pub difficulty: f64,
15+
pub user_identity: String,
16+
pub worker_id: String,
1717
}
1818

1919
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
@@ -36,6 +36,8 @@ impl ShareLog {
3636
share_status: ShareStatus,
3737
extranonce: Vec<u8>,
3838
difficulty: f64,
39+
user_identity: String,
40+
worker_id: String,
3941
) -> Self {
4042
Self {
4143
channel_id,
@@ -47,35 +49,9 @@ impl ShareLog {
4749
hash,
4850
share_status,
4951
extranonce,
50-
difficulty
52+
difficulty,
53+
user_identity,
54+
worker_id,
5155
}
5256
}
53-
}
54-
55-
#[async_trait]
56-
impl ShareData for ShareLog {
57-
fn get_identifier(&self) -> String {
58-
format!("{}_{}", self.channel_id, self.sequence_number)
59-
}
60-
61-
async fn validate(&self) -> bool {
62-
true
63-
}
64-
65-
fn to_storage_format(&self) -> Vec<(String, String)> {
66-
vec![
67-
("channel_id".to_string(), self.channel_id.to_string()),
68-
("sequence_number".to_string(), self.sequence_number.to_string()),
69-
("job_id".to_string(), self.job_id.to_string()),
70-
("nonce".to_string(), self.nonce.to_string()),
71-
("ntime".to_string(), self.ntime.to_string()),
72-
("version".to_string(), self.version.to_string()),
73-
("hash".to_string(), hex::encode(&self.hash)),
74-
("share_status".to_string(), (self.share_status as u8).to_string()),
75-
("extranonce".to_string(), hex::encode(&self.extranonce)),
76-
("difficulty".to_string(), self.difficulty.to_string()),
77-
]
78-
}
79-
80-
fn is_block_found(&self) -> bool { false }
8157
}

nomium/shares-logger/src/services/share_processor.rs

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@ use crate::models::ShareLog;
22
use mining_sv2::Target;
33
use super::difficulty::DifficultyService;
44
use crate::models::ShareStatus;
5+
use log::info;
6+
use serde_json::Value;
7+
use serde_json::json;
58

69
pub struct ShareProcessor;
710

@@ -15,8 +18,26 @@ impl ShareProcessor {
1518
version: u32,
1619
hash: [u8; 32],
1720
downstream_target: Target,
18-
extranonce: Vec<u8>, // Принимаем Vec<u8>
21+
extranonce: Vec<u8>,
22+
user_identity_json: String,
1923
) -> ShareLog {
24+
let worker_identity: Value = serde_json::from_str(&user_identity_json)
25+
.unwrap_or_else(|_| json!({
26+
"worker_name": user_identity_json.clone(),
27+
"worker_id": "unknown"
28+
}));
29+
30+
let user_identity = worker_identity["worker_name"]
31+
.as_str()
32+
.unwrap_or(&user_identity_json)
33+
.to_string();
34+
35+
let worker_id = worker_identity["worker_id"]
36+
.as_str()
37+
.unwrap_or("unknown")
38+
.to_string();
39+
40+
info!("user_identity from prepare_share_log: {}", user_identity);
2041
let mut hash_bytes = hash;
2142
hash_bytes.reverse();
2243
let difficulty = DifficultyService::calculate_difficulty_from_hash(&hash_bytes);
@@ -34,6 +55,8 @@ impl ShareProcessor {
3455
status,
3556
extranonce,
3657
difficulty,
58+
user_identity,
59+
worker_id,
3760
)
3861
}
3962
}

0 commit comments

Comments
 (0)