Commit e6e6e2d8 authored by Koen van der Veen's avatar Koen van der Veen
Browse files

merge

parent 590f0067
Pipeline #7593 failed with stage
in 6 minutes and 7 seconds
Showing with 235 additions and 71 deletions
+235 -71
......@@ -520,8 +520,7 @@ dependencies = [
[[package]]
name = "graphql-parser"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2ebc8013b4426d5b81a4364c419a95ed0b404af2b82e2457de52d9348f0e474"
source = "git+https://github.com/alpdeniz/graphql-parser?branch=tilda_as_reverse_edges#310e71a03071f253e3b41c7cb7b7d7ce45c7147d"
dependencies = [
"combine",
"thiserror",
......
......@@ -37,7 +37,7 @@ time = { version = "0.3.5", features = ["formatting", "macros"] }
tokio = { version = "1.14.0", features = ["full"] }
warp = { version = "0.3.2", default-features = false, features = ["tls"] }
zeroize = "1.4.3"
graphql-parser = "0.4.0"
graphql-parser = { git = "https://github.com/alpdeniz/graphql-parser", branch = "tilda_as_reverse_edges" }
[dev-dependencies]
criterion = "0.3.5"
......
......@@ -17,8 +17,11 @@ pub fn deep_join(c: &mut Criterion) {
let mut schema = database_api::get_schema(&tx).unwrap();
let num_items = 2000;
let num_edges = 100;
// create level 1
for i in 1..1001 {
for i in 0..num_items / 2 {
let _person = {
let person = json!({
"type": "Person",
......@@ -30,7 +33,7 @@ pub fn deep_join(c: &mut Criterion) {
}
// create level 2
for i in 1001..2001 {
for i in num_items / 2..num_items {
let _person = {
let person = json!({
"type": "Person",
......@@ -62,7 +65,7 @@ pub fn deep_join(c: &mut Criterion) {
// Get updated schema
let mut schema = database_api::get_schema(&tx).unwrap();
for i in 1..101 {
for i in 0..num_edges {
let _edge = {
let json = json!({
"_source": "00000".to_owned() + &i.to_string(),
......@@ -86,7 +89,7 @@ pub fn deep_join(c: &mut Criterion) {
serde_json::from_value(search_json.clone()).unwrap(),
)
.unwrap();
assert_eq!(res.len(), 2000);
assert_eq!(res.len(), num_items);
// graphql search
let query = "
......
......@@ -310,25 +310,11 @@ pub struct GQLSearchArgs {
pub rowids: Option<Vec<Rowid>>, // TODO rowids should be in filters
pub sort_property: String,
pub sort_order: SortOrder,
pub limit: Option<u64>,
pub offset: Option<u64>,
pub limit: Option<u32>,
pub offset: Option<u32>,
// pub filters: Option<Vec<Filter>>,
}
/// Default search arguments: no type or rowid filtering, no pagination,
/// sorted ascending by `dateServerModified`.
impl Default for GQLSearchArgs {
    fn default() -> Self {
        Self {
            _type: None,
            properties: Vec::new(),
            rowids: None,
            sort_property: String::from("dateServerModified"),
            sort_order: SortOrder::Asc,
            limit: None,
            offset: None,
        }
    }
}
pub struct GQLPropertiesResult {
pub rowid: Rowid,
pub properties: serde_json::Map<String, Value>,
......@@ -343,27 +329,29 @@ pub struct EdgeResult {
pub fn get_edges(
tx: &Tx,
source_id: Rowid,
item_id: Rowid,
edge_name: Option<&String>,
reverse: bool,
) -> Result<Vec<EdgeResult>> {
let mut query = format!("SELECT self, source, name, target FROM edges WHERE source = ?");
let mut query = if reverse {
"SELECT self, target, name, source FROM edges WHERE target = ?".to_owned()
} else {
"SELECT self, source, name, target FROM edges WHERE source = ?".to_owned()
};
if let Some(e) = edge_name {
query.push_str(&format!(" AND name = \"{e}\""));
}
let mut stmt = tx.prepare_cached(&query)?;
let mut rows = stmt.query(params![source_id])?;
// let mut rows = stmt.query(params![source_id])?;
// query.push_str(" ORDER BY rowid;");
// let mut stmt = tx.prepare_cached(&query)?;
// let mut rows = stmt.query(params![])?;
let mut rows = stmt.query(params![item_id])?;
let mut result = Vec::new();
while let Some(row) = rows.next()? {
let self_id = row.get(0)?;
let source = row.get(1)?;
let name = row.get(2)?;
let mut name = row.get(2)?;
if reverse {
name = format!("{}{:?}", "~", name);
}
let target = row.get(3)?;
result.push(EdgeResult {
self_id,
......@@ -381,7 +369,6 @@ pub fn search_gql_properties(
schema: &Schema,
) -> Result<Vec<GQLPropertiesResult>> {
// Returns results from a GQLSearchArgs, filtered, sorted, paginated, no edges.
let sql_query = query_from_gqlsearch(query, schema)?;
let mut stmt = tx
.prepare_cached(&sql_query)
......@@ -1227,8 +1214,8 @@ pub mod tests {
rowids: Some(rowids),
limit: Some(100),
offset: Some(0),
sort_property: String::from("content"),
..Default::default()
sort_property: "content".to_owned(),
sort_order: SortOrder::default(),
};
let schema = get_schema(&tx)?;
......
......@@ -118,6 +118,16 @@ impl<T> From<std::sync::PoisonError<T>> for Error {
}
}
/// Map an integer-narrowing failure (e.g. `u32::try_from` on a value that
/// does not fit) onto a 400 Bad Request API error, so `?` can be used on
/// fallible conversions of client-supplied numbers.
impl From<std::num::TryFromIntError> for Error {
    fn from(err: std::num::TryFromIntError) -> Self {
        Self {
            code: StatusCode::BAD_REQUEST,
            msg: format!("Parsing error: {}", err),
        }
    }
}
pub trait ErrorContext<T> {
fn context<F>(self, context_add: F) -> Result<T>
where
......
use crate::api_model::SortOrder;
use graphql_parser::query::parse_query;
use graphql_parser::query::Definition;
use graphql_parser::query::Field;
use graphql_parser::query::OperationDefinition;
use graphql_parser::query::Selection;
use graphql_parser::query::SelectionSet;
use graphql_parser::query::Text;
use graphql_parser::query::Value;
use std::convert::TryFrom;
use warp::http::status::StatusCode;
use crate::error::Error;
......@@ -12,18 +15,18 @@ use crate::error::Result;
use std::collections::HashMap;
#[derive(Debug)]
pub struct QueryASTNode<'a> {
pub struct QueryASTNode {
pub item_type: Option<String>,
pub arguments: HashMap<String, Value<'a, String>>,
pub arguments: Arguments,
pub properties: Vec<String>,
pub edges: HashMap<String, QueryASTNode<'a>>,
pub edges: HashMap<String, QueryASTNode>,
}
impl<'a> QueryASTNode<'a> {
impl QueryASTNode {
fn new() -> Self {
QueryASTNode {
item_type: None,
arguments: HashMap::new(),
arguments: Arguments::new(),
properties: Vec::new(),
edges: HashMap::new(),
}
......@@ -38,7 +41,9 @@ fn parse_query_recursive(selection_set: SelectionSet<String>) -> Result<QueryAST
result.properties.push(field.name);
} else {
let mut edge_ast_node = parse_query_recursive(field.selection_set)?;
edge_ast_node.arguments = field.arguments.into_iter().collect();
for (arg_name, arg_value) in field.arguments {
edge_ast_node.arguments.add_arg(arg_name, arg_value)?;
}
result.edges.insert(field.name, edge_ast_node);
}
}
......@@ -51,11 +56,11 @@ pub fn parse_graphql_query(query_string: &str) -> Result<QueryASTNode> {
// TODO error handling
let parsed = match parse_query::<String>(query_string) {
Ok(res) => res,
Err(_) => {
Err(err) => {
return Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "Invalid GraphQL query".to_owned(),
})
msg: format!("Invalid GraphQL query {}", err),
});
}
};
......@@ -82,7 +87,9 @@ pub fn parse_graphql_query(query_string: &str) -> Result<QueryASTNode> {
if let Selection::Field(field) = query {
let mut result = parse_query_recursive(field.selection_set)?;
result.item_type = Some(field.name);
result.arguments = field.arguments.into_iter().collect();
for (arg_name, arg_value) in field.arguments {
result.arguments.add_arg(arg_name, arg_value)?;
}
Ok(result)
} else {
panic!("Fragments are not supported")
......@@ -96,6 +103,122 @@ pub fn field_is_property(field: &Field<String>) -> bool {
field.selection_set.items.is_empty()
}
/// Filter AST for GraphQL `filter` arguments: leaf property comparisons
/// plus boolean combinators. Not yet wired into `Arguments` (see the
/// commented-out `filters` field there).
#[derive(Debug)]
pub enum Filter {
    EQ { property: String, value: String },
    NE { property: String, value: String },
    LT { property: String, value: String },
    GT { property: String, value: String },
    // Boxed because a directly recursive enum would have infinite size;
    // the indirection gives these variants a known, fixed size.
    AND { left: Box<Filter>, right: Box<Filter> },
    OR { left: Box<Filter>, right: Box<Filter> },
}
/// Pagination and ordering arguments parsed from a GraphQL field's
/// argument list, e.g. `Message(limit: 10, offset: 5, order_asc: id)`.
/// All fields start as `None` (no limit/offset, no explicit ordering).
#[derive(Debug)]
pub struct Arguments {
    /// Maximum number of items to return; `None` means no limit.
    pub limit: Option<u32>,
    /// Number of items to skip before returning results; `None` means 0.
    pub offset: Option<u32>,
    /// Ascending or descending; set together with `sort_property` by
    /// `order_asc` / `order_desc` arguments.
    pub sort_order: Option<SortOrder>,
    /// Property name to sort on (the enum value of the order argument).
    pub sort_property: Option<String>,
    // Structured filters are not wired up yet.
    //pub filters: Vec<Filter>,
}
impl Arguments {
fn new() -> Arguments {
Arguments {
limit: None,
offset: None,
sort_order: None,
sort_property: None,
}
}
fn add_arg<'a, T: Text<'a>>(
&mut self,
arg_name: String,
arg_value: Value<'a, T>,
) -> Result<()> {
match arg_name.as_str() {
"limit" => self.set_limit(arg_value)?,
// "filter" => self.set_filter(arg_value)?,
"offset" => self.set_offset(arg_value)?,
"order_asc" => self.set_order(arg_value, SortOrder::Asc)?,
"order_desc" => self.set_order(arg_value, SortOrder::Desc)?,
_ => {
return Err(Error {
code: StatusCode::BAD_REQUEST,
msg: format!("Unknown graphQL argument: {}", arg_name),
})
}
}
Ok(())
}
fn set_limit<'a, T: Text<'a>>(&mut self, arg_value: Value<'a, T>) -> Result<()> {
if self.limit.is_some() {
return Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "GraphQL error: multiple limits defined".to_owned(),
});
}
if let Value::Int(v) = arg_value {
let v = u32::try_from(v.as_i64().unwrap())?;
self.limit = Some(v);
Ok(())
} else {
Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "Invalid GraphQL query limit".to_owned(),
})
}
}
fn set_offset<'a, T: Text<'a>>(&mut self, arg_value: Value<'a, T>) -> Result<()> {
if self.offset.is_some() {
return Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "GraphQL error: multiple offsets defined".to_owned(),
});
}
if let Value::Int(v) = arg_value {
let v = u32::try_from(v.as_i64().unwrap())?;
self.offset = Some(v);
Ok(())
} else {
Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "Invalid GraphQL query offset".to_owned(),
})
}
}
fn set_order<'a, T: Text<'a>>(
&mut self,
arg_value: Value<'a, T>,
order: SortOrder,
) -> Result<()> {
if self.sort_order.is_some() | self.sort_property.is_some() {
return Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "GraphQL error: multiple orders defined".to_owned(),
});
}
if let Value::Enum(o) = arg_value {
self.sort_property = Some(o.as_ref().to_owned());
self.sort_order = Some(order);
Ok(())
} else {
Err(Error {
code: StatusCode::BAD_REQUEST,
msg: "Invalid GraphQL query order".to_owned(),
})
}
}
}
#[cfg(test)]
pub mod tests {
use super::*;
......@@ -103,6 +226,22 @@ pub mod tests {
#[test]
fn test_parse_query() -> Result<()> {
let test_query = "
query {
Message (limit: 1000, offset: 200 order_asc: id) {
id
}
}
";
let query_ast = parse_graphql_query(test_query)?;
println!("{:?}", query_ast.arguments);
Ok(())
}
#[test]
fn test_filters() -> Result<()> {
let test_query = "
query {
Message {
......
use crate::api_model::Bulk;
use std::time::Duration;
use std::time::Instant;
use crate::api_model::CreateEdge;
use crate::api_model::CreateItem;
use crate::api_model::DeleteEdgeBySourceTarget;
......@@ -30,13 +28,14 @@ use crate::schema::validate_property_name;
use crate::schema::Schema;
use crate::triggers;
use crate::triggers::SchemaAdditionChange;
use chacha20poly1305::Tag;
use log::info;
use rand::Rng;
use rusqlite::Transaction as Tx;
use serde_json::Value;
use std::collections::HashMap;
use std::str;
use std::time::Duration;
use std::time::Instant;
use warp::http::status::StatusCode;
pub fn get_project_version() -> String {
......@@ -453,47 +452,62 @@ fn gql_search_recursive(
query_ast: &QueryASTNode,
schema: &Schema,
item_rowids: Option<Vec<Rowid>>,
level: u32
level: u32,
) -> Result<Vec<Value>> {
// TODO limit, sorting, filters
let sort_property = query_ast
.arguments
.sort_property
.to_owned()
.unwrap_or_else(|| "dateServerModified".to_owned());
let sort_order = query_ast
.arguments
.sort_order
.to_owned()
.unwrap_or_default();
let query = GQLSearchArgs {
_type: query_ast.item_type.to_owned(),
properties: query_ast.properties.to_owned(),
rowids: item_rowids,
..Default::default()
sort_property,
sort_order,
limit: query_ast.arguments.limit,
offset: query_ast.arguments.offset,
};
let now = Instant::now();
let items = database_api::search_gql_properties(tx, &query, schema)?;
let mut result = Vec::new();
if level== 1 {
let props: Vec<serde_json::Map<String, Value>> = items.iter().map(|x| x.properties.clone()).collect();
// println!("{:?}", props);
// println!("{:?}", query.properties);
// println!("{:?}", query.rowids);
if level == 0 {
let elapsed = now.elapsed();
// println!("Elapsed: {:.2?}", elapsed);
}
let now = Instant::now();
let mut total_edges = Duration::new(0,0);
let mut total_edges_items = Duration::new(0,0);
let mut total_edges = Duration::new(0, 0);
let mut total_edges_items = Duration::new(0, 0);
for item in items.into_iter() {
let mut item_json = item.properties;
for (edge_name, edge_item_ast) in &query_ast.edges {
let now_edges = Instant::now();
let edges = database_api::get_edges(tx, item.rowid, Some(edge_name))?;
if level==0{
let edges = if edge_name.contains('~') {
let normalized = &edge_name[1..edge_name.len()];
database_api::get_edges(tx, item.rowid, Some(&normalized.to_string()), true)?
} else {
database_api::get_edges(tx, item.rowid, Some(edge_name), false)?
};
if level == 0 {
let elapsed = now_edges.elapsed();
total_edges = total_edges + elapsed;
total_edges += elapsed;
}
let target_ids:Option<Vec<Rowid>>= Some(edges.iter().map(|e| e.target).collect());
let now_edge_items = Instant::now();
// if target_ids.is_some() && &target_ids.unwrap().len() > &0 {
let edge_jsons = if let Some(_target_ids) = target_ids {
if _target_ids.len() > 0 {
gql_search_recursive(tx, edge_item_ast, schema, Some(_target_ids), level+1)?
if !_target_ids.is_empty() {
gql_search_recursive(tx, edge_item_ast, schema, Some(_target_ids), level + 1)?
} else {
Vec::new()
}
......@@ -502,11 +516,10 @@ fn gql_search_recursive(
};
item_json.insert(edge_name.to_owned(), edge_jsons.into());
if level==0{
if level == 0 {
let elapsed = now_edge_items.elapsed();
total_edges_items = total_edges_items + elapsed;
total_edges_items += elapsed;
}
}
result.push(item_json.into());
}
......@@ -843,13 +856,26 @@ mod tests {
let items_with_friends = data
.iter()
.map(|x| x.get("friend1"))
.filter_map(|x| x)
.filter(|x| x.as_array().unwrap().len() > 0);
tx.commit();
.filter_map(|x| x.get("friend1"))
.filter(|x| !x.as_array().unwrap().is_empty());
// print!("{:}", serde_json::to_string_pretty(&json!(data)).unwrap());
assert_eq!(items_with_friends.count(), 100);
let query = "
query {
Person1 (limit: 10, offset: 5) {
id
friend1 {
id
}
}
}"
.to_owned();
let res = graphql(&tx, &schema, query);
let data = res.unwrap();
let data = data.get("data").unwrap();
assert_eq!(data.len(), 10);
}
#[test]
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment