WIP: Feature/2019 03 upload s3 2 #9

Open
wants to merge 11 commits into base: master
Changes from all commits
4 changes: 4 additions & 0 deletions .env.example
@@ -6,3 +6,7 @@ GOOGLE_OAUTH_CLIENT_SECRET=aBaa0GhsF0exEXAMPLEw6ABw
ROOT_HOST=https://localhost:8088
S3_ACCESS_KEY_ID=minioaccesskey
S3_SECRET_ACCESS_KEY=miniosecretaccesskey
S3_URL=http://localhost:9000
# Bucket prefix might be a shortened version of your hostname in case you are
# testing from multiple locations
S3_BUCKET_PREFIX=local
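
For a quick sanity check of these values, here is a minimal standalone sketch, not part of this PR; it assumes the dotenv crate is available, while the application itself reads the same variables through the dotenv! macro, as the src/app/mod.rs diff below shows.

```rust
// Sketch only: print the S3 settings a local dev setup would pick up from .env.
// Assumes the dotenv crate as a dependency; not part of this PR.
fn main() {
    dotenv::dotenv().ok(); // load .env from the working directory, if present
    let keys = ["S3_ACCESS_KEY_ID", "S3_SECRET_ACCESS_KEY", "S3_URL", "S3_BUCKET_PREFIX"];
    for key in keys.iter() {
        match std::env::var(key) {
            Ok(value) => println!("{}={}", key, value),
            Err(_) => eprintln!("{} is not set", key),
        }
    }
}
```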
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -26,6 +26,7 @@ diesel-derive-enum = { version = "0.4.4", features = ["postgres"] }
diesel-derive-newtype = "0.1.2"
listenfd = "0.3"
redis-async = "^0.4"
ring = "0.13.5"
serde = "^1.0"
serde_json = "^1.0"
serde_derive = "^1.0"
3 changes: 2 additions & 1 deletion diesel.toml
@@ -1,5 +1,6 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/db/schema.rs"
patch_file = "src/schema.patch"
7 changes: 7 additions & 0 deletions docs/migration_ref/2019-03-06-162437_add_forms/down.sql
@@ -0,0 +1,7 @@
-- This file should undo anything in `up.sql`
DROP TABLE form_nested_forms;
DROP FUNCTION form_has_property;
DROP TABLE form_default_choice_properties;
DROP TABLE form_properties;
DROP FUNCTION form_properties_ord;
DROP TABLE forms;
95 changes: 95 additions & 0 deletions docs/migration_ref/2019-03-06-162437_add_forms/up.sql
@@ -0,0 +1,95 @@
CREATE TABLE forms (
id BIGINT PRIMARY KEY DEFAULT id_generator(),
handle TEXT UNIQUE,
title TEXT,
listed BOOLEAN NOT NULL,
created_by BIGINT NOT NULL DEFAULT 2 REFERENCES users(id) ON DELETE SET DEFAULT,
created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE FUNCTION form_properties_ord() RETURNS REAL AS $$
BEGIN RETURN 1.0::REAL; END;
$$ LANGUAGE plpgsql;
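-- Placeholder body: redefined below, once form_properties exists, so that new rows
-- default to an ord after all existing ones.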

CREATE TABLE form_properties (
form_id BIGINT NOT NULL REFERENCES forms(id) ON DELETE CASCADE,
property_id BIGINT NOT NULL REFERENCES properties(id) ON DELETE CASCADE,
ord REAL NOT NULL DEFAULT form_properties_ord(),
description TEXT,
PRIMARY KEY (form_id, property_id)
);

CREATE TABLE form_default_choice_properties (
form_id BIGINT NOT NULL REFERENCES forms(id) ON DELETE CASCADE,
property_id BIGINT NOT NULL REFERENCES properties(id) ON DELETE CASCADE,
property_value_choice_id BIGINT NOT NULL REFERENCES property_value_choices(id) ON DELETE CASCADE,
PRIMARY KEY (form_id, property_id, property_value_choice_id),
CONSTRAINT "Property default must be choice type"
CHECK (property_type_is(property_id, 'choice'))
);

CREATE FUNCTION form_has_property(form_id bigint, property_id bigint) RETURNS BOOLEAN AS $$
-- Qualify the parameters: an unqualified form_id/property_id here resolves to the
-- form_properties columns, which would make the predicate match every row.
SELECT COUNT(*) = 1 FROM form_properties fp
WHERE fp.form_id = form_has_property.form_id
AND fp.property_id = form_has_property.property_id;
$$ LANGUAGE SQL;

CREATE TABLE form_nested_forms (
form_id BIGINT NOT NULL REFERENCES forms(id) ON DELETE CASCADE,
property_id BIGINT NOT NULL REFERENCES properties(id) ON DELETE CASCADE,
nested_form_id BIGINT NOT NULL REFERENCES forms(id) ON DELETE CASCADE,
-- Notice that there can only be one nested form per property
PRIMARY KEY (form_id, property_id),
CONSTRAINT "Property for nested form must be relation type"
CHECK (property_type_is(property_id, 'relation')),
CONSTRAINT "Property must exist on parent form"
CHECK (form_has_property(form_id, property_id))
);

-- Replace the placeholder so that new form properties default to an ord after all existing ones
CREATE OR REPLACE FUNCTION form_properties_ord() RETURNS real AS $$
SELECT (COALESCE(MAX(ord), 1) + 1)::REAL FROM form_properties;
$$ LANGUAGE SQL;


-- Seed values for first form

-- 1. Adam creates a new collection form
-- -- Create form: Tokyo Proposals
INSERT INTO forms
(id, handle, title, listed, created_by)
VALUES
(1901, 'test-jp-proposals', 'Tokyo Proposals 🇯🇵🏯', TRUE, 901);
-- -- Create collection: Tokyo Proposals (Same name)
INSERT INTO property_value_choices
(id, property_id, display, created_by)
VALUES
(2001, 20, 'Tokyo Proposals 🇯🇵🏯', 901);
-- -- Add collection choice property as a default property value
INSERT INTO form_default_choice_properties
(form_id, property_id, property_value_choice_id)
VALUES
(1901, 20, 2001);
-- 2. Adam wants people to add any context necessary to this form as text
-- 2.1 Adam must create a property which does not yet exist, he'll call it "Context"/text
-- -- Create Context property
INSERT INTO properties
(id, display, property_type, created_by)
VALUES
(901961, 'Context', 'text', 901);
-- -- Add property to form
INSERT INTO form_properties
(form_id, property_id, description)
VALUES
(1901, 901961, 'What else should we know about this proposal and how it was used?');
-- 3. Adam wants people to also submit any source files used to create the proposal
-- 3.1 Adam must create a property which does not yet exist, he'll call it "Source files"/relation
-- -- Create "Source files" relation property
INSERT INTO properties
(id, display, property_type, created_by)
VALUES
(901981, 'Source files', 'relation', 901);
-- -- Add property to form
INSERT INTO form_properties
(form_id, property_id, description)
VALUES
(1901, 901981, 'Please share any source files used for this proposal ❤️');
@@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
ALTER TABLE objects
DROP COLUMN level;
@@ -0,0 +1,5 @@
ALTER TABLE objects
ADD COLUMN level INTEGER NOT NULL DEFAULT 0;
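-- DEFAULT 0 backfills existing rows so the NOT NULL column can be added in place;
-- the default is dropped below so future inserts must set level explicitly.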

ALTER TABLE objects
ALTER COLUMN level DROP DEFAULT;
22 changes: 12 additions & 10 deletions src/app/mod.rs
@@ -1,20 +1,20 @@
use askama::Template; // bring trait in scope

use actix_web::middleware::session::{SessionStorage, RequestSession};
use actix::{Actor, SyncArbiter};
use actix_redis::{RedisActor, RedisSessionBackend};
use actix_web::middleware::session::{RequestSession, SessionStorage};
use actix_web::{http, Error};
use actix_web::{middleware, server, App, HttpRequest, HttpResponse};
use futures::Future;
use actix::{Actor, SyncArbiter};
use actix_redis::{RedisSessionBackend, RedisActor};

use super::db::DbExecutor;
use super::logging;
use super::sessions;
use super::db::DbExecutor;
pub use super::State;

use sessions::flash::SessionFlash;
use sessions::session_manager::SessionManager;
use sessions::session_routes::{self, is_signed_in_guard, SigninState};
use sessions::flash::SessionFlash; // enable inserting and applying flash messages to the page
use sessions::session_routes::{self, is_signed_in_guard, SigninState}; // login/logout routes and the sign-in guard

pub mod templates;
mod upload;
@@ -44,8 +44,9 @@ pub fn start() {
let store_actor = ObjectStore::new_with_s3_credentials(
dotenv!("S3_ACCESS_KEY_ID"),
dotenv!("S3_SECRET_ACCESS_KEY"),
dotenv!("S3_BUCKET_PREFIX"),
)
.expect("No TLS errors starting store_actor");
.expect("No TLS errors or Bucket creation errors starting store_actor");

let store_addr = store_actor.start();

@@ -75,8 +76,10 @@ pub fn start() {
.cookie_name("sess"),
))
.resource("/example", |r| r.f(upload_example))
.resource("/upload", |r| {
r.method(http::Method::POST).with(upload::upload)
.scope("/api", |scope: actix_web::Scope<State>| {
scope.nested("/v0", |scope: actix_web::Scope<State>| {
scope.nested("/upload", upload::upload_scope)
})
})
.scope("/login", session_routes::login_scope)
.resource("/logout", |r| r.f(session_routes::logout_endpoint))
@@ -98,7 +101,6 @@ pub fn start() {
server.run();
}


fn index(req: &HttpRequest<State>) -> Box<Future<Item = HttpResponse, Error = Error>> {
use templates::*;
let req_session = req.session();
161 changes: 94 additions & 67 deletions src/app/upload.rs
@@ -1,74 +1,64 @@
use futures::future;
use futures::{Future, Stream};
use futures::{future, Future, IntoFuture};
use ring::digest;

use actix_web::{
dev, error, multipart, Error, FutureResponse, HttpMessage, HttpRequest, HttpResponse,
};

use std::fs;
use std::io::Write;
use actix_web::{error, http, FutureResponse, HttpMessage, HttpRequest, HttpResponse};

use crate::user::User;
use crate::State;

/// from payload, save file
pub fn save_file(field: multipart::Field<dev::Payload>) -> Box<Future<Item = i64, Error = Error>> {
use std::ffi::OsStr;
use std::path::Path;
#[derive(Serialize)]
struct URL {
url: String,
key: String,
}

let filename = field
.content_disposition()
.and_then(|cd| cd.get_filename().map(|st| st.to_string()))
.unwrap_or("upload".to_string());
let fileext = Path::new(&filename)
.extension()
.and_then(OsStr::to_str)
.map_or("".to_string(), |ext| format!(".{}", ext));
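/// Hand the signed-in user a pre-signed PUT URL (and object key) for a pending upload.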
pub fn get_url(req: &HttpRequest<State>) -> FutureResponse<HttpResponse> {
use crate::object::store;
use crate::object::ObjectStore;
use actix::Addr;
use actix_web::error;
let store_actor: Addr<ObjectStore> = req.state().store.clone();

use crate::sessions::UserSession;
use crate::{is_signed_in_guard, SigninState};

println!(
"Saving file: filename {:?}; extension: {:?}",
filename, fileext
);
let file_path_string = format!("./static/uploads/{}", filename);
let mut file = match fs::File::create(file_path_string) {
Ok(file) => file,
Err(e) => return Box::new(future::err(error::ErrorInternalServerError(e))),
};
Box::new(
field
.fold(0i64, move |acc, bytes| {
let rt = file
.write_all(bytes.as_ref())
.map(|_| acc + bytes.len() as i64)
.map_err(|e| {
println!("file.write_all failed: {:?}", e);
error::MultipartError::Payload(error::PayloadError::Io(e))
});
future::result(rt)
is_signed_in_guard(&req)
.and_then(|state| match state {
SigninState::Valid(session) => Ok(session),
_ => Err(error::ErrorForbidden("Must log in to upload")),
})
.map_err(|e| {
println!("save_file failed, {:?}", e);
error::ErrorInternalServerError(e)
.and_then(move |session: UserSession| {
store_actor
.send(store::GetPendingPutUrl {
user_id: session.person.user_id,
})
.map_err(|_| error::ErrorBadRequest("Failed to get put url"))
.and_then(|res| res)
.map_err(|_| error::ErrorBadRequest("Failed to get put url"))
})
.map(|put_url: store::PendingPutUrl| {
HttpResponse::Ok()
.header(actix_web::http::header::CONTENT_TYPE, "application/json")
.json(URL {
url: put_url.url,
key: put_url.key,
})
}),
)
}

pub fn handle_multipart_item(
item: multipart::MultipartItem<dev::Payload>,
) -> Box<Stream<Item = i64, Error = Error>> {
match item {
multipart::MultipartItem::Field(field) => Box::new(save_file(field).into_stream()),
multipart::MultipartItem::Nested(mp) => Box::new(
mp.map_err(error::ErrorInternalServerError)
.map(handle_multipart_item)
.flatten(),
),
}
#[derive(Deserialize)]
struct PostCompleteRequestJSON {
key: String,
}

pub fn upload(req: HttpRequest<State>) -> FutureResponse<HttpResponse> {
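/// Finalize a pending upload after the client has PUT the object, first checking that the key belongs to the session user.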
pub fn post_complete(req: &HttpRequest<State>) -> FutureResponse<HttpResponse> {
use crate::object::store;
use crate::object::ObjectStore;
use actix::Addr;
use actix_web::error;

let store_actor: Addr<ObjectStore> = req.state().store.clone();
use crate::sessions::UserSession;
use crate::{is_signed_in_guard, SigninState};

@@ -78,17 +68,54 @@ pub fn upload(req: HttpRequest<State>) -> FutureResponse<HttpResponse> {
SigninState::Valid(session) => Ok(session),
_ => Err(error::ErrorForbidden("Must log in to upload")),
})
.and_then(move |session: UserSession| {
req.multipart()
.map_err(error::ErrorInternalServerError)
.map(handle_multipart_item)
.flatten()
.collect()
.map(|sizes| HttpResponse::Ok().json(sizes))
.map_err(|e| {
println!("failed: {}", e);
e
})
}),
.join(req.json::<PostCompleteRequestJSON>().map_err(|a| a.into()))
.and_then(
move |(session, req): (UserSession, PostCompleteRequestJSON)| {
ensure_ownership_of_pending(&session.person, &req.key)
.into_future()
.and_then(move |_| {
store_actor
.send(store::FinalizeObject { key: req.key })
.map_err(|_| error::ErrorBadRequest("Failed to finalize object"))
.and_then(|res| res)
.map_err(|_| error::ErrorBadRequest("Failed to finalize object"))
.map(move |stored_object: store::StoredObject| {
info!(
"Uploaded and retrieved: {} => {}",
session.person.user_id, stored_object.key
);
HttpResponse::Ok()
.header(
actix_web::http::header::CONTENT_TYPE,
"application/json",
)
.json(URL {
url: stored_object.bucket,
key: stored_object.key,
})
})
})
},
),
)
}

/// API calls related to upload functionality
pub fn upload_scope(scope: actix_web::Scope<State>) -> actix_web::Scope<State> {
scope
.resource("/url", |r| r.method(http::Method::GET).f(get_url))
.resource("/complete", |r| {
r.method(http::Method::POST).f(post_complete)
})
}

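/// A pending object's key embeds the uploading user's id; refuse to finalize keys that do not contain it.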
fn ensure_ownership_of_pending<U: User>(user: &U, key: &str) -> Result<(), error::Error> {
let user_id_str = format!("{}", user.id());
if key.contains(&user_id_str) {
Ok(())
} else {
Err(error::ErrorUnauthorized(
"Needs ownership of pending object",
))
}
}
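
Combined with the /api/v0 scope added in src/app/mod.rs, the new routes are GET /api/v0/upload/url and POST /api/v0/upload/complete. The sketch below walks the client side of that flow; it is not part of this PR and assumes reqwest with the blocking and json features, a server at the ROOT_HOST from .env.example, and a sess cookie from an existing login. The url and key field names follow the URL struct and PostCompleteRequestJSON above.

```rust
// Sketch only: drives the three-step upload flow exposed by upload_scope.
// Assumptions: reqwest = { version = "0.11", features = ["blocking", "json"] },
// sess_cookie is the full "sess=<value>" pair from a logged-in session, and TLS
// trust for the local ROOT_HOST is already handled.
use std::collections::HashMap;

fn upload_file(
    client: &reqwest::blocking::Client,
    sess_cookie: &str,
    bytes: Vec<u8>,
) -> Result<String, Box<dyn std::error::Error>> {
    let base = "https://localhost:8088"; // ROOT_HOST in .env.example

    // 1. GET /api/v0/upload/url: the server answers with {"url": ..., "key": ...},
    //    where url is a pre-signed PUT URL for a pending object.
    let pending: HashMap<String, String> = client
        .get(format!("{}/api/v0/upload/url", base))
        .header(reqwest::header::COOKIE, sess_cookie)
        .send()?
        .error_for_status()?
        .json()?;

    // 2. PUT the file body straight to the object store (MinIO/S3) at that URL.
    client
        .put(pending["url"].as_str())
        .body(bytes)
        .send()?
        .error_for_status()?;

    // 3. POST /api/v0/upload/complete with the key so the server can finalize the object.
    let mut done = HashMap::new();
    done.insert("key", pending["key"].as_str());
    client
        .post(format!("{}/api/v0/upload/complete", base))
        .header(reqwest::header::COOKIE, sess_cookie)
        .json(&done)
        .send()?
        .error_for_status()?;

    Ok(pending["key"].clone())
}
```

With a self-signed local certificate the client likely needs to be built via reqwest::blocking::Client::builder().danger_accept_invalid_certs(true).build().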