
Commit 452e07e

Author: Zoran Cvetkov (committed)
Message: fixes
Parent: 5049ac7

File tree: 8 files changed (+66, -67 lines)

graph/src/components/store/entity_cache.rs

Lines changed: 2 additions & 2 deletions
@@ -461,7 +461,7 @@ impl EntityCache {
         updates.remove_null_fields();
         let data = Arc::new(updates);
         self.current.insert(key.clone(), Some(data.cheap_clone()));
-        let vid = data.vid_opt().unwrap_or_default();
+        let vid = data.vid();
         Some(Insert {
             key,
             data,
@@ -478,7 +478,7 @@ impl EntityCache {
         let data = Arc::new(data);
         self.current.insert(key.clone(), Some(data.cheap_clone()));
         if current != data {
-            let vid = data.vid_opt().unwrap_or_default();
+            let vid = data.vid();
             Some(Overwrite {
                 key,
                 data,
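A minimal sketch of the behavioral shift behind this change, using stand-in types rather than graph-node's real Entity: the removed vid_opt().unwrap_or_default() pattern silently fell back to 0 when no vid was stored, while vid() (judging by the `.expect("the vid is set to a valid value")` context visible in the next file's diff) treats a missing vid as a programming error.

// Illustrative stand-in only; not the actual graph-node Entity type.
struct Entity {
    vid: Option<i64>,
}

impl Entity {
    // Old shape: callers paired this with unwrap_or_default(), so a missing
    // vid quietly became 0.
    fn vid_opt(&self) -> Option<i64> {
        self.vid
    }

    // New shape: a missing vid is a bug, not a default.
    fn vid(&self) -> i64 {
        self.vid.expect("the vid is set to a valid value")
    }
}

fn main() {
    let e = Entity { vid: Some(7) };
    assert_eq!(e.vid_opt().unwrap_or_default(), 7); // old call-site shape
    assert_eq!(e.vid(), 7); // new call-site shape
}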

graph/src/data/store/mod.rs

Lines changed: 0 additions & 4 deletions
@@ -920,10 +920,6 @@ impl Entity {
             .expect("the vid is set to a valid value")
     }
 
-    pub fn vid_opt(&self) -> Option<i64> {
-        self.get("vid").map(|vid| vid.as_int8()).unwrap_or_default()
-    }
-
     /// Merges an entity update `update` into this entity.
     ///
     /// If a key exists in both entities, the value from `update` is chosen.

runtime/test/src/test.rs

Lines changed: 9 additions & 8 deletions
@@ -477,17 +477,18 @@ async fn test_ipfs_block() {
 // The user_data value we use with calls to ipfs_map
 const USER_DATA: &str = "user_data";
 
-fn make_thing(id: &str, value: &str) -> (String, EntityModification) {
-    const DOCUMENT: &str = " type Thing @entity { id: String!, value: String!, extra: String }";
+fn make_thing(id: &str, value: &str, vid: i64) -> (String, EntityModification) {
+    const DOCUMENT: &str =
+        " type Thing @entity { id: String!, value: String!, extra: String, vid: Int8 }";
     lazy_static! {
         static ref SCHEMA: InputSchema = InputSchema::raw(DOCUMENT, "doesntmatter");
         static ref THING_TYPE: EntityType = SCHEMA.entity_type("Thing").unwrap();
     }
-    let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA };
+    let data = entity! { SCHEMA => id: id, value: value, extra: USER_DATA, vid:vid };
     let key = THING_TYPE.parse_key(id).unwrap();
     (
         format!("{{ \"id\": \"{}\", \"value\": \"{}\"}}", id, value),
-        EntityModification::insert(key, data, 0),
+        EntityModification::insert(key, data, 0, vid),
     )
 }
 
@@ -553,8 +554,8 @@ async fn test_ipfs_map(api_version: Version, json_error_msg: &str) {
     let subgraph_id = "ipfsMap";
 
     // Try it with two valid objects
-    let (str1, thing1) = make_thing("one", "eins");
-    let (str2, thing2) = make_thing("two", "zwei");
+    let (str1, thing1) = make_thing("one", "eins", 0);
+    let (str2, thing2) = make_thing("two", "zwei", 0);
     let ops = run_ipfs_map(
         subgraph_id,
         format!("{}\n{}", str1, str2),
@@ -1001,8 +1002,8 @@ async fn test_entity_store(api_version: Version) {
 
     let schema = store.input_schema(&deployment.hash).unwrap();
 
-    let alex = entity! { schema => id: "alex", name: "Alex" };
-    let steve = entity! { schema => id: "steve", name: "Steve" };
+    let alex = entity! { schema => id: "alex", name: "Alex", vid: 0i64};
+    let steve = entity! { schema => id: "steve", name: "Steve", vid: 1i64};
     let user_type = schema.entity_type("User").unwrap();
     test_store::insert_entities(
         &deployment,

store/postgres/src/relational/ddl_tests.rs

Lines changed: 45 additions & 45 deletions
@@ -384,7 +384,7 @@ create type sgd0815."size"
 as enum ('large', 'medium', 'small');
 
 create table "sgd0815"."thing" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "big_thing" text not null
@@ -405,7 +405,7 @@ create index attr_0_1_thing_big_thing
 
 
 create table "sgd0815"."scalar" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "bool" boolean,
@@ -444,7 +444,7 @@ create index attr_1_7_scalar_color
 
 
 create table "sgd0815"."file_thing" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         causality_region int not null,
         "id" text not null
@@ -469,7 +469,7 @@ create type sgd0815."size"
 as enum ('large', 'medium', 'small');
 
 create table "sgd0815"."thing" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "big_thing" text not null
@@ -490,7 +490,7 @@ create index attr_0_1_thing_big_thing
 
 
 create table "sgd0815"."scalar" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "bool" boolean,
@@ -515,7 +515,7 @@ create index attr_1_0_scalar_id
 
 
 create table "sgd0815"."file_thing" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         causality_region int not null,
         "id" text not null
@@ -575,7 +575,7 @@ type SongStat @entity {
     played: Int!
 }"#;
 const MUSIC_DDL: &str = r#"create table "sgd0815"."musician" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "name" text not null,
@@ -598,7 +598,7 @@ create index attr_0_2_musician_main_band
 on "sgd0815"."musician" using gist("main_band", block_range);
 
 create table "sgd0815"."band" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "name" text not null,
@@ -618,8 +618,8 @@ create index attr_1_1_band_name
 on "sgd0815"."band" using btree(left("name", 256));
 
 create table "sgd0815"."song" (
-        vid bigserial primary key,
-        block$ int not null,
+        vid bigint primary key,
+        block$ int not null,
         "id" text not null,
         "title" text not null,
         "written_by" text not null,
@@ -634,7 +634,7 @@ create index attr_2_1_song_written_by
 on "sgd0815"."song" using btree("written_by", block$);
 
 create table "sgd0815"."song_stat" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "played" int4 not null
@@ -676,7 +676,7 @@ type Habitat @entity {
 }"#;
 
 const FOREST_DDL: &str = r#"create table "sgd0815"."animal" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "forest" text
@@ -695,8 +695,8 @@ create index attr_0_1_animal_forest
 on "sgd0815"."animal" using gist("forest", block_range);
 
 create table "sgd0815"."forest" (
-        vid bigserial primary key,
-        block_range int4range not null,
+        vid bigint primary key,
+        block_range int4range not null,
         "id" text not null
 );
 alter table "sgd0815"."forest"
@@ -711,7 +711,7 @@ create index attr_1_0_forest_id
 on "sgd0815"."forest" using btree("id");
 
 create table "sgd0815"."habitat" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "most_common" text not null,
@@ -763,7 +763,7 @@ type Habitat @entity {
 }"#;
 
 const FULLTEXT_DDL: &str = r#"create table "sgd0815"."animal" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "name" text not null,
@@ -791,7 +791,7 @@ create index attr_0_4_animal_search
 on "sgd0815"."animal" using gin("search");
 
 create table "sgd0815"."forest" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null
 );
@@ -808,7 +808,7 @@ create index attr_1_0_forest_id
 on "sgd0815"."forest" using btree("id");
 
 create table "sgd0815"."habitat" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "most_common" text not null,
@@ -843,7 +843,7 @@ enum Orientation {
 const FORWARD_ENUM_SQL: &str = r#"create type sgd0815."orientation"
 as enum ('DOWN', 'UP');
 create table "sgd0815"."thing" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block_range int4range not null,
         "id" text not null,
         "orientation" "sgd0815"."orientation" not null
@@ -880,8 +880,8 @@ type Stats @aggregation(intervals: ["hour", "day"], source: "Data") {
 
 const TS_SQL: &str = r#"
 create table "sgd0815"."data" (
-        vid bigserial primary key,
-        block$ int not null,
+        vid bigint primary key,
+        block$ int not null,
         "id" int8 not null,
         "timestamp" timestamptz not null,
         "amount" numeric not null,
@@ -895,8 +895,8 @@ create index attr_0_1_data_amount
 on "sgd0815"."data" using btree("amount");
 
 create table "sgd0815"."stats_hour" (
-        vid bigserial primary key,
-        block$ int not null,
+        vid bigint primary key,
+        block$ int not null,
         "id" int8 not null,
         "timestamp" timestamptz not null,
         "volume" numeric not null,
@@ -913,7 +913,7 @@ create index attr_1_2_stats_hour_max_price
 on "sgd0815"."stats_hour" using btree("max_price");
 
 create table "sgd0815"."stats_day" (
-        vid bigserial primary key,
+        vid bigint primary key,
         block$ int not null,
         "id" int8 not null,
         "timestamp" timestamptz not null,
@@ -971,9 +971,9 @@ const LIFETIME_GQL: &str = r#"
 
 const LIFETIME_SQL: &str = r#"
 create table "sgd0815"."data" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "group_1" int4 not null,
         "group_2" int4 not null,
@@ -992,9 +992,9 @@ create index attr_0_3_data_amount
 on "sgd0815"."data" using btree("amount");
 
 create table "sgd0815"."stats_1_hour" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "volume" numeric not null,
         unique(id)
@@ -1008,9 +1008,9 @@ on "sgd0815"."stats_1_hour" using btree("volume");
 
 
 create table "sgd0815"."stats_1_day" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "volume" numeric not null,
         unique(id)
@@ -1024,9 +1024,9 @@ on "sgd0815"."stats_1_day" using btree("volume");
 
 
 create table "sgd0815"."stats_2_hour" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "group_1" int4 not null,
         "volume" numeric not null,
@@ -1044,9 +1044,9 @@ create index stats_2_hour_dims
 on "sgd0815"."stats_2_hour"(group_1, timestamp);
 
 create table "sgd0815"."stats_2_day" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "group_1" int4 not null,
         "volume" numeric not null,
@@ -1064,9 +1064,9 @@ create index stats_2_day_dims
 on "sgd0815"."stats_2_day"(group_1, timestamp);
 
 create table "sgd0815"."stats_3_hour" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "group_2" int4 not null,
         "group_1" int4 not null,
@@ -1087,9 +1087,9 @@ create index stats_3_hour_dims
 on "sgd0815"."stats_3_hour"(group_2, group_1, timestamp);
 
 create table "sgd0815"."stats_3_day" (
-        vid bigserial primary key,
-        block$ int not null,
-        "id" int8 not null,
+        vid bigint primary key,
+        block$ int not null,
+        "id" int8 not null,
         "timestamp" timestamptz not null,
         "group_2" int4 not null,
         "group_1" int4 not null,

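The bigserial to bigint switch in these expected DDL strings means Postgres no longer assigns vid from a sequence on insert; the writer has to supply the value itself, which is what the Rust-side changes in this commit thread through. A hypothetical sketch of that caller-side responsibility (the allocator below is illustrative only, not how graph-node actually assigns vids):

// Hypothetical allocator: with a plain `bigint primary key`, the application
// has to pick each vid itself, mimicking what the old `bigserial` sequence
// used to do implicitly at insert time.
struct VidAllocator {
    next: i64,
}

impl VidAllocator {
    fn new() -> Self {
        VidAllocator { next: 0 }
    }

    // Hands out monotonically increasing vids.
    fn allocate(&mut self) -> i64 {
        let vid = self.next;
        self.next += 1;
        vid
    }
}

fn main() {
    let mut vids = VidAllocator::new();
    assert_eq!(vids.allocate(), 0);
    assert_eq!(vids.allocate(), 1);
}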
store/test-store/tests/graph/entity_cache.rs

Lines changed: 5 additions & 4 deletions
@@ -221,8 +221,8 @@ fn insert_modifications() {
     assert_eq!(
         sort_by_entity_key(result.unwrap().modifications),
         sort_by_entity_key(vec![
-            EntityModification::insert(mogwai_key, mogwai_data, 0),
-            EntityModification::insert(sigurros_key, sigurros_data, 0)
+            EntityModification::insert(mogwai_key, mogwai_data, 0, 0),
+            EntityModification::insert(sigurros_key, sigurros_data, 0, 0)
         ])
     );
 }
@@ -265,8 +265,8 @@ fn overwrite_modifications() {
     assert_eq!(
         sort_by_entity_key(result.unwrap().modifications),
         sort_by_entity_key(vec![
-            EntityModification::overwrite(mogwai_key, mogwai_data, 0),
-            EntityModification::overwrite(sigurros_key, sigurros_data, 0)
+            EntityModification::overwrite(mogwai_key, mogwai_data, 0, 0),
+            EntityModification::overwrite(sigurros_key, sigurros_data, 0, 0)
        ])
     );
 }
@@ -304,6 +304,7 @@ fn consecutive_modifications() {
         sort_by_entity_key(vec![EntityModification::overwrite(
             update_key,
             entity! { SCHEMA => id: "mogwai", name: "Mogwai", founded: 1995 },
+            0,
             0
         )])
     );
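These updated assertions reflect the extra trailing vid argument on the insert and overwrite constructors. A simplified, hypothetical sketch of that call-site shape (the enum below is a stand-in, not the real EntityModification, whose variants carry entity keys and data):

// Stand-in for the real EntityModification: just enough structure to show the
// extra trailing `vid` argument the tests above now pass explicitly.
#[derive(Debug, PartialEq)]
enum EntityModification {
    Insert { id: &'static str, block: i32, vid: i64 },
    Overwrite { id: &'static str, block: i32, vid: i64 },
}

impl EntityModification {
    fn insert(id: &'static str, block: i32, vid: i64) -> Self {
        EntityModification::Insert { id, block, vid }
    }

    fn overwrite(id: &'static str, block: i32, vid: i64) -> Self {
        EntityModification::Overwrite { id, block, vid }
    }
}

fn main() {
    // Mirrors the updated test expectations: block and vid are both explicit.
    let a = EntityModification::insert("mogwai", 0, 0);
    let b = EntityModification::overwrite("sigurros", 0, 0);
    assert_ne!(a, b);
}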

store/test-store/tests/postgres/relational.rs

Lines changed: 1 addition & 0 deletions
@@ -891,6 +891,7 @@ fn conflicting_entity() {
            data: fred,
            block: 2,
            end: None,
+            vid: 0,
        },
        2,
    )
