Mirror of https://github.com/redlib-org/redlib.git (synced 2025-06-07 15:17:47 +00:00)
Move from Actix Web to Tide (#99)
* Initial commit
* Port posts
* Pinpoint Tide Bug
* Revert testing
* Add basic sub support
* Unwrap nested routes
* Front page & sync templates
* Port remaining functions
* Log request errors
* Clean main and settings
* Handle /w/ requests
* Create template() util (a sketch follows the commit metadata below)
* Reduce caching time to 30s
* Fix subscription redirects
* Handle frontpage sorting
Parent: 402b3149e1
Commit: ebbdd7185f
15 changed files with 1283 additions and 1189 deletions
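The "Create template() util" item refers to a helper added in src/utils.rs, which is not part of the excerpt below. A minimal sketch of what such a helper plausibly looks like, assuming Askama's Template trait and the same Tide response-builder calls used later in this diff:

use askama::Template;
use tide::Response;

// Hypothetical sketch, not the actual src/utils.rs code: render an Askama
// template and wrap it in a 200 text/html response.
pub fn template(t: impl Template) -> tide::Result {
	Ok(
		Response::builder(200)
			.content_type("text/html")
			.body(t.render().unwrap_or_default())
			.build(),
	)
}

Every handler below returns tide::Result, so a shared helper like this replaces the repeated HttpResponse::Ok().content_type("text/html").body(s) chains from the Actix version.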
src/subreddit.rs (104 changed lines)

@@ -1,7 +1,7 @@
 // CRATES
 use crate::utils::*;
-use actix_web::{cookie::Cookie, HttpRequest, HttpResponse, Result};
 use askama::Template;
+use tide::{http::Cookie, Request, Response};
 use time::{Duration, OffsetDateTime};
 
 // STRUCTS
@@ -25,14 +25,14 @@ struct WikiTemplate {
 }
 
 // SERVICES
-pub async fn page(req: HttpRequest) -> HttpResponse {
+pub async fn page(req: Request<()>) -> tide::Result {
 	// Build Reddit API path
 	let subscribed = cookie(&req, "subscriptions");
 	let front_page = cookie(&req, "front_page");
-	let sort = req.match_info().get("sort").unwrap_or("hot").to_string();
+	let sort = req.param("sort").unwrap_or_else(|_| req.param("id").unwrap_or("hot")).to_string();
 
 	let sub = req
-		.match_info()
-		.get("sub")
+		.param("sub")
 		.map(String::from)
 		.unwrap_or(if front_page == "default" || front_page.is_empty() {
 			if subscribed.is_empty() {
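The req.param("sub"), param("sort") and param("id") calls above read Tide's colon-style path parameters, so they only resolve if src/main.rs (changed in this commit but not shown here) mounts the handler on matching routes. A hypothetical sketch of that registration:

// Hypothetical route setup, assuming Tide endpoints registered with .at()/.get();
// the real main.rs of this commit may use different paths.
let mut app = tide::new();
app.at("/r/:sub").get(subreddit::page);
app.at("/r/:sub/:sort").get(subreddit::page);
app.at("/:id").get(subreddit::page); // front-page sort routes such as /hot are an assumption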
@@ -44,7 +44,7 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
 			front_page.to_owned()
 		});
 
-	let path = format!("/r/{}/{}.json?{}", sub, sort, req.query_string());
+	let path = format!("/r/{}/{}.json?{}&raw_json=1", sub, sort, req.url().query().unwrap_or_default());
 
 	match fetch_posts(&path, String::new()).await {
 		Ok((posts, after)) => {
@@ -54,7 +54,7 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
 				subreddit(&sub).await.unwrap_or_default()
 			} else if sub == subscribed {
 				// Subscription feed
-				if req.path().starts_with("/r/") {
+				if req.url().path().starts_with("/r/") {
 					subreddit(&sub).await.unwrap_or_default()
 				} else {
 					Subreddit::default()
@@ -69,42 +69,55 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
 				Subreddit::default()
 			};
 
-			let s = SubredditTemplate {
+			template(SubredditTemplate {
 				sub,
 				posts,
 				sort: (sort, param(&path, "t")),
 				ends: (param(&path, "after"), after),
 				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
+			})
 		}
 		Err(msg) => error(msg).await,
 	}
 }
 
 // Sub or unsub by setting subscription cookie using response "Set-Cookie" header
-pub async fn subscriptions(req: HttpRequest) -> HttpResponse {
-	let mut res = HttpResponse::Found();
+pub async fn subscriptions(req: Request<()>) -> tide::Result {
+	let sub = req.param("sub").unwrap_or_default().to_string();
+	let query = req.url().query().unwrap_or_default().to_string();
+	let action: Vec<String> = req.url().path().split('/').map(String::from).collect();
 
-	let sub = req.match_info().get("sub").unwrap_or_default().to_string();
-	let action = req.match_info().get("action").unwrap_or_default().to_string();
-	let mut sub_list = prefs(req.to_owned()).subs;
+	let mut sub_list = prefs(req).subs;
 
 	// Modify sub list based on action
-	if action == "subscribe" && !sub_list.contains(&sub) {
+	if action.contains(&"subscribe".to_string()) && !sub_list.contains(&sub) {
 		sub_list.push(sub.to_owned());
-		sub_list.sort_by_key(|a| a.to_lowercase());
-	} else if action == "unsubscribe" {
+		sub_list.sort_by_key(|a| a.to_lowercase())
+	} else if action.contains(&"unsubscribe".to_string()) {
 		sub_list.retain(|s| s != &sub);
 	}
+
+	// Redirect back to subreddit
+	// check for redirect parameter if unsubscribing from outside sidebar
+	let redirect_path = param(format!("/?{}", query).as_str(), "redirect");
+	let path = if !redirect_path.is_empty() {
+		format!("/{}/", redirect_path)
+	} else {
+		format!("/r/{}", sub)
+	};
+
+	let mut res = Response::builder(302)
+		.content_type("text/html")
+		.header("Location", path.to_owned())
+		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
+		.build();
 
 	// Delete cookie if empty, else set
 	if sub_list.is_empty() {
-		res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
+		// res.del_cookie(&Cookie::build("subscriptions", "").path("/").finish());
+		res.remove_cookie(Cookie::build("subscriptions", "").path("/").finish());
 	} else {
-		res.cookie(
+		res.insert_cookie(
 			Cookie::build("subscriptions", sub_list.join("+"))
 				.path("/")
 				.http_only(true)
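The subscribe/unsubscribe flow writes the "subscriptions" cookie with insert_cookie()/remove_cookie(), and page() reads it back through the cookie() helper from src/utils.rs, which this excerpt does not show. A rough sketch of that reader, assuming Tide's Request::cookie():

// Hypothetical sketch of the cookie() helper; the actual utils.rs code may differ.
pub fn cookie(req: &tide::Request<()>, name: &str) -> String {
	req.cookie(name).map(|c| c.value().to_string()).unwrap_or_default()
}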
@@ -113,38 +126,21 @@ pub async fn subscriptions(req: HttpRequest) -> HttpResponse {
 		);
 	}
 
-	// Redirect back to subreddit
-	// check for redirect parameter if unsubscribing from outside sidebar
-	let redirect_path = param(&req.uri().to_string(), "redirect");
-	let path = if !redirect_path.is_empty() && redirect_path.starts_with('/') {
-		redirect_path
-	} else {
-		format!("/r/{}", sub)
-	};
-
-	res
-		.content_type("text/html")
-		.set_header("Location", path.to_owned())
-		.body(format!("Redirecting to <a href=\"{0}\">{0}</a>...", path))
+	Ok(res)
 }
 
-pub async fn wiki(req: HttpRequest) -> HttpResponse {
-	let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string();
-	let page = req.match_info().get("page").unwrap_or("index").to_string();
+pub async fn wiki(req: Request<()>) -> tide::Result {
+	let sub = req.param("sub").unwrap_or("reddit.com").to_string();
+	let page = req.param("page").unwrap_or("index").to_string();
 	let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
 
 	match request(path).await {
-		Ok(res) => {
-			let s = WikiTemplate {
-				sub,
-				wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
-				page,
-				prefs: prefs(req),
-			}
-			.render()
-			.unwrap();
-			HttpResponse::Ok().content_type("text/html").body(s)
-		}
+		Ok(res) => template(WikiTemplate {
+			sub,
+			wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
+			page,
+			prefs: prefs(req),
+		}),
 		Err(msg) => error(msg).await,
 	}
 }
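Both handlers pass a path that still carries its query string to param() (another src/utils.rs helper outside this diff), e.g. param(&path, "after") above and param(format!("/?{}", query).as_str(), "redirect") in subscriptions(). A hypothetical sketch of that lookup, not the actual implementation:

// Hypothetical sketch: pull one query-string value out of a path such as
// "/r/rust/hot.json?t=week&after=t3_abc".
pub fn param(path: &str, key: &str) -> String {
	path
		.split('?')
		.nth(1)
		.unwrap_or_default()
		.split('&')
		.filter_map(|pair| pair.split_once('='))
		.find(|(k, _)| *k == key)
		.map(|(_, v)| v.to_string())
		.unwrap_or_default()
}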
@@ -163,8 +159,14 @@ async fn subreddit(sub: &str) -> Result<Subreddit, String> {
 	let active: i64 = res["data"]["accounts_active"].as_u64().unwrap_or_default() as i64;
 
 	// Fetch subreddit icon either from the community_icon or icon_img value
-	let community_icon: &str = res["data"]["community_icon"].as_str().map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
-	let icon = if community_icon.is_empty() { val(&res, "icon_img") } else { community_icon.to_string() };
+	let community_icon: &str = res["data"]["community_icon"]
+		.as_str()
+		.map_or("", |s| s.split('?').collect::<Vec<&str>>()[0]);
+	let icon = if community_icon.is_empty() {
+		val(&res, "icon_img")
+	} else {
+		community_icon.to_string()
+	};
 
 	let sub = Subreddit {
 		name: val(&res, "display_name"),
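val(&res, "icon_img") and val(&res, "display_name") read string fields out of the parsed Reddit API response. val() also lives in src/utils.rs and is untouched by this commit; assuming res is a serde_json::Value, its likely shape is roughly:

// Hypothetical sketch of the val() helper; the real utils.rs version may differ.
pub fn val(j: &serde_json::Value, k: &str) -> String {
	j["data"][k].as_str().unwrap_or_default().to_string()
}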