Commit 5f716945 authored by Vasili Novikov's avatar Vasili Novikov
Browse files

Merge branch 'separate-schema-file' into 'dev'

Separate schema file from compilation

See merge request memri/pod!166
parents 3963df38 baaaebac
Showing with 8 additions and 4 deletions
+8 -4
......@@ -25,7 +25,7 @@ RUN set -x && \
#### After the dependencies are built, copy the sources and build the real thing.
COPY res res
COPY res/migrations res/migrations
COPY build.rs build.rs
COPY src src
COPY benches benches
......@@ -38,6 +38,7 @@ RUN cargo build --release && mv target/release/pod ./ && rm -rf target
FROM debian:buster-slim
COPY --from=cargo-build /usr/bin/docker /usr/bin/docker
COPY --from=cargo-build /usr/src/pod/pod pod
COPY res/autogenerated_database_schema.json res/autogenerated_database_schema.json
RUN apt-get update && apt-get install -y libsqlcipher-dev && rm -rf /var/lib/apt/lists/*
# Check that library versions match (sqlcipher, libc, etc)
......
......@@ -3,3 +3,5 @@
// Relative path of the directory holding the database files.
pub const DATABASE_DIR: &str = "./data/db";
// Relative path of the directory holding media files.
pub const MEDIA_DIR: &str = "./data/media";
// Path to the autogenerated database schema JSON. Added by this MR so the
// schema is read from disk at runtime instead of being compiled into the
// binary via include_bytes! (see the SCHEMA_STRUCT change below).
pub const SCHEMA_JSON_FILE: &str = "res/autogenerated_database_schema.json";
......@@ -40,7 +40,10 @@ pub enum SchemaPropertyType {
lazy_static! {
static ref SCHEMA_STRUCT: DatabaseSchema = {
let schema: DatabaseSchema = serde_json::from_slice(SCHEMA_JSON_BYTES)
let file = crate::constants::SCHEMA_JSON_FILE;
let file = std::fs::read(file)
.unwrap_or_else(|err| panic!("Failed to read schema file {}, {}", file, err));
let schema: DatabaseSchema = serde_json::from_slice(&file)
.expect("Failed to parse autogenerated_database_schema to JSON");
validate_schema(&schema).unwrap_or_else(|err| panic!("Schema validation failed, {}", err));
schema
......@@ -103,8 +106,6 @@ lazy_static! {
};
}
const SCHEMA_JSON_BYTES: &[u8] = include_bytes!("../res/autogenerated_database_schema.json");
pub fn migrate(conn: &Connection) -> Result<(), String> {
info!("Initializing database schema (additional columns)");
let schema: &DatabaseSchema = &SCHEMA_STRUCT;
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment