Mirror of https://github.com/redlib-org/redlib.git (synced 2025-04-20 13:59:15 +00:00)

Commit 0f9eed302a: Merge branch 'main' into patch-1
20 changed files with 555 additions and 487 deletions

Cargo.lock (generated, 751 changes)
File diff suppressed because it is too large.

Cargo.toml (12 changes)

@@ -9,6 +9,7 @@ authors = [
	"spikecodes <19519553+spikecodes@users.noreply.github.com>",
]
edition = "2021"
default-run = "redlib"

[dependencies]
rinja = { version = "0.3.4", default-features = false }
@@ -16,13 +17,14 @@ cached = { version = "0.51.3", features = ["async"] }
clap = { version = "4.4.11", default-features = false, features = [
	"std",
	"env",
	"derive",
] }
regex = "1.10.2"
serde = { version = "1.0.193", features = ["derive"] }
cookie = "0.18.0"
futures-lite = "2.2.0"
hyper = { version = "0.14.28", features = ["full"] }
-hyper-rustls = "0.24.2"
+hyper-rustls = { version = "0.24.2", features = [ "http2" ] }
percent-encoding = "2.3.1"
route-recognizer = "0.3.1"
serde_json = "1.0.108"
@@ -56,3 +58,11 @@ sealed_test = "1.0.0"
codegen-units = 1
lto = true
strip = "symbols"
+
+[[bin]]
+name = "redlib"
+path = "src/main.rs"
+
+[[bin]]
+name = "scraper"
+path = "src/scraper/main.rs"
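
Build note: with default-run = "redlib" and the two [[bin]] targets above, a plain cargo run or cargo build should still produce the server binary, while the new scraper target is selected explicitly with cargo run --bin scraper (its source is added below as src/scraper/main.rs).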

@@ -4,7 +4,7 @@ use futures_lite::future::block_on;
use futures_lite::{future::Boxed, FutureExt};
use hyper::client::HttpConnector;
use hyper::header::HeaderValue;
-use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri};
+use hyper::{body, body::Buf, header, Body, Client, Method, Request, Response, Uri};
use hyper_rustls::HttpsConnector;
use libflate::gzip;
use log::{error, trace, warn};
@@ -30,10 +30,10 @@ const REDDIT_SHORT_URL_BASE_HOST: &str = "redd.it";
const ALTERNATIVE_REDDIT_URL_BASE: &str = "https://www.reddit.com";
const ALTERNATIVE_REDDIT_URL_BASE_HOST: &str = "www.reddit.com";

-pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| {
-	let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build();
-	client::Client::builder().build(https)
-});
+pub static HTTPS_CONNECTOR: Lazy<HttpsConnector<HttpConnector>> =
+	Lazy::new(|| hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http2().build());
+
+pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| Client::builder().build::<_, Body>(HTTPS_CONNECTOR.clone()));

pub static OAUTH_CLIENT: Lazy<ArcSwap<Oauth>> = Lazy::new(|| {
	let client = block_on(Oauth::new());
@@ -154,7 +154,7 @@ async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String
	let parsed_uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;

	// Build the hyper client from the HTTPS connector.
-	let client: Client<_, Body> = CLIENT.clone();
+	let client: &Lazy<Client<_, Body>> = &CLIENT;

	let mut builder = Request::get(parsed_uri);

@@ -216,7 +216,7 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
	let url = format!("{base_path}{path}");

	// Construct the hyper client from the HTTPS connector.
-	let client: Client<_, Body> = CLIENT.clone();
+	let client: &Lazy<Client<_, Body>> = &CLIENT;

	let (token, vendor_id, device_id, user_agent, loid) = {
		let client = OAUTH_CLIENT.load_full();
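
The hunks above replace the per-call CLIENT.clone() with a borrow of the global client and split the TLS setup into its own HTTPS_CONNECTOR static with HTTP/2 enabled, so the connector is built exactly once. A minimal standalone sketch of the resulting pattern, assuming hyper 0.14, hyper-rustls 0.24 and the once_cell Lazy used elsewhere in the crate (status_of is a hypothetical helper, not part of the diff):

	use hyper::client::HttpConnector;
	use hyper::{Body, Client, Uri};
	use hyper_rustls::HttpsConnector;
	use once_cell::sync::Lazy;

	// Build the rustls connector once; every client handle shares it.
	pub static HTTPS_CONNECTOR: Lazy<HttpsConnector<HttpConnector>> =
		Lazy::new(|| hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http2().build());

	// One global client built from the shared connector.
	pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| Client::builder().build::<_, Body>(HTTPS_CONNECTOR.clone()));

	// Hypothetical call site: borrow the shared client instead of cloning it.
	async fn status_of(uri: Uri) -> Result<u16, hyper::Error> {
		let resp = CLIENT.get(uri).await?;
		Ok(resp.status().as_u16())
	}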

@@ -85,7 +85,7 @@ fn info_html(req: &Request<Body>) -> Result<Response<Body>, Error> {
pub struct InstanceInfo {
	package_name: String,
	crate_version: String,
-	git_commit: String,
+	pub git_commit: String,
	deploy_date: String,
	compile_mode: String,
	deploy_unix_ts: i64,

src/lib.rs (13 lines, new file)

@@ -0,0 +1,13 @@
pub mod client;
pub mod config;
pub mod duplicates;
pub mod instance_info;
pub mod oauth;
pub mod oauth_resources;
pub mod post;
pub mod search;
pub mod server;
pub mod settings;
pub mod subreddit;
pub mod user;
pub mod utils;

src/main.rs (54 changes)

@@ -2,35 +2,21 @@
#![forbid(unsafe_code)]
#![allow(clippy::cmp_owned)]

// Reference local files
-mod config;
-mod duplicates;
-mod instance_info;
-mod oauth;
-mod oauth_resources;
-mod post;
-mod search;
-mod settings;
-mod subreddit;
-mod user;
-mod utils;

// Import Crates
use cached::proc_macro::cached;
use clap::{Arg, ArgAction, Command};
use std::str::FromStr;

use futures_lite::FutureExt;
use hyper::Uri;
use hyper::{header::HeaderValue, Body, Request, Response};

-mod client;
-use client::{canonical_path, proxy};
use log::info;
use once_cell::sync::Lazy;
-use server::RequestExt;
-use utils::{error, redirect, ThemeAssets};
+use redlib::client::{canonical_path, proxy, CLIENT};
+use redlib::server::{self, RequestExt};
+use redlib::utils::{error, redirect, ThemeAssets};
+use redlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user};

-use crate::client::OAUTH_CLIENT;

-mod server;
+use redlib::client::OAUTH_CLIENT;

// Create Services

@@ -232,6 +218,11 @@ async fn main() {
	app
		.at("/highlighted.js")
		.get(|_| resource(include_str!("../static/highlighted.js"), "text/javascript", false).boxed());
+	app
+		.at("/check_update.js")
+		.get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
+
+	app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());

	// Proxy media through Redlib
	app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
@@ -389,3 +380,22 @@ async fn main() {
		eprintln!("Server error: {e}");
	}
}
+
+pub async fn proxy_commit_info() -> Result<Response<Body>, String> {
+	Ok(
+		Response::builder()
+			.status(200)
+			.header("content-type", "application/atom+xml")
+			.body(Body::from(fetch_commit_info().await))
+			.unwrap_or_default(),
+	)
+}
+
+#[cached(time = 600)]
+async fn fetch_commit_info() -> String {
+	let uri = Uri::from_str("https://github.com/redlib-org/redlib/commits/main.atom").expect("Invalid URI");
+
+	let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body();
+
+	hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
+}
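
The two helpers added above back the new /commits.atom route: fetch_commit_info() pulls the upstream commit feed through the shared CLIENT, and #[cached(time = 600)] (from the cached crate already in Cargo.toml) memoizes the result, so the feed should be refetched at most every 600 seconds; proxy_commit_info() then serves the cached body with an application/atom+xml content type.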

@@ -94,7 +94,7 @@ impl Oauth {
		trace!("Sending token request...");

		// Send request
-		let client: client::Client<_, Body> = CLIENT.clone();
+		let client: &once_cell::sync::Lazy<client::Client<_, Body>> = &CLIENT;
		let resp = client.request(request).await.ok()?;

		trace!("Received response with status {} and length {:?}", resp.status(), resp.headers().get("content-length"));

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
// CRATES
use crate::client::json;
use crate::config::get_setting;

src/scraper/main.rs (75 lines, new file)

@@ -0,0 +1,75 @@
use std::{fmt::Display, io::Write};

use clap::{Parser, ValueEnum};
use redlib::utils::Post;

#[derive(Parser)]
#[command(name = "my_cli")]
#[command(about = "A simple CLI example", long_about = None)]
struct Cli {
	#[arg(short = 's', long = "sub")]
	sub: String,

	#[arg(short = 'c', long = "count")]
	count: usize,

	#[arg(long = "sort")]
	sort: SortOrder,

	#[arg(short = 'f', long = "format", value_enum)]
	format: Format,
	#[arg(short = 'o', long = "output")]
	output: Option<String>,
}

#[derive(Debug, Clone, ValueEnum)]
enum SortOrder {
	Hot,
	Rising,
	New,
	Top,
	Controversial,
}

impl Display for SortOrder {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			SortOrder::Hot => write!(f, "hot"),
			SortOrder::Rising => write!(f, "rising"),
			SortOrder::New => write!(f, "new"),
			SortOrder::Top => write!(f, "top"),
			SortOrder::Controversial => write!(f, "controversial"),
		}
	}
}

#[derive(Debug, Clone, ValueEnum)]
enum Format {
	Json,
}

#[tokio::main]
async fn main() {
	let cli = Cli::parse();
	let (sub, final_count, sort, format, output) = (cli.sub, cli.count, cli.sort, cli.format, cli.output);
	let initial = format!("/r/{sub}/{sort}.json?&raw_json=1");
	let (mut posts, mut after) = Post::fetch(&initial, false).await.unwrap();
	while posts.len() < final_count {
		print!("\r");
		let path = format!("/r/{sub}/{sort}.json?sort={sort}&t=&after={after}&raw_json=1");
		let (new_posts, new_after) = Post::fetch(&path, false).await.unwrap();
		posts.extend(new_posts);
		after = new_after;
		// Print number of posts fetched
		print!("Fetched {} posts", posts.len());
		std::io::stdout().flush().unwrap();
	}

	match format {
		Format::Json => {
			let filename: String = output.unwrap_or_else(|| format!("{sub}.json"));
			let json = serde_json::to_string(&posts).unwrap();
			std::fs::write(filename, json).unwrap();
		}
	}
}
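
Going by the clap derive above, the scraper should accept an invocation along the lines of: cargo run --bin scraper -- -s rust -c 100 --sort hot -f json -o rust.json (subreddit, target post count, sort order, output format, and an optional output path; the flag values here are only an example).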

@@ -1,9 +1,11 @@
+#![allow(clippy::cmp_owned)]
+
// CRATES
use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences};
use crate::{
	client::json,
+	server::RequestExt,
	subreddit::{can_access_quarantine, quarantine},
-	RequestExt,
};
use hyper::{Body, Request, Response};
use once_cell::sync::Lazy;

@@ -1,4 +1,5 @@
#![allow(dead_code)]
+#![allow(clippy::cmp_owned)]

use brotli::enc::{BrotliCompress, BrotliEncoderParams};
use cached::proc_macro::cached;
@@ -195,6 +196,12 @@ impl Route<'_> {
	}
}

+impl Default for Server {
+	fn default() -> Self {
+		Self::new()
+	}
+}
+
impl Server {
	pub fn new() -> Self {
		Self {
@@ -723,7 +730,7 @@ mod tests {

			CompressionType::Brotli => Box::new(BrotliDecompressor::new(body_cursor, expected_lorem_ipsum.len())),

-			_ => panic!("no decompressor for {}", expected_encoding.to_string()),
+			_ => panic!("no decompressor for {}", expected_encoding),
		};

		let mut decompressed = Vec::<u8>::new();

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
use std::collections::HashMap;

// CRATES
@@ -38,6 +40,7 @@ const PREFS: [&str; 19] = [
	"hide_awards",
	"hide_score",
	"disable_visit_reddit_confirmation",
+	"video_quality",
];

// FUNCTIONS

@@ -1,9 +1,11 @@
+#![allow(clippy::cmp_owned)]
+
use crate::{config, utils};
// CRATES
use crate::utils::{
	catch_random, error, filter_posts, format_num, format_url, get_filters, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences, Subreddit,
};
-use crate::{client::json, server::ResponseExt, RequestExt};
+use crate::{client::json, server::RequestExt, server::ResponseExt};
use cookie::Cookie;
use hyper::{Body, Request, Response};
use rinja::Template;

@@ -1,3 +1,5 @@
+#![allow(clippy::cmp_owned)]
+
// CRATES
use crate::client::json;
use crate::server::RequestExt;

src/utils.rs (19 changes)

@@ -1,4 +1,6 @@
#![allow(dead_code)]
+#![allow(clippy::cmp_owned)]
+
use crate::config::{self, get_setting};
//
// CRATES
@@ -11,6 +13,7 @@ use once_cell::sync::Lazy;
use regex::Regex;
use rinja::Template;
use rust_embed::RustEmbed;
use serde::Serialize;
use serde_json::Value;
use serde_json_path::{JsonPath, JsonPathExt};
use std::collections::{HashMap, HashSet};
@@ -46,6 +49,7 @@ pub enum ResourceType {
}

// Post flair with content, background color and foreground color
+#[derive(Serialize)]
pub struct Flair {
	pub flair_parts: Vec<FlairPart>,
	pub text: String,
@@ -54,7 +58,7 @@ pub struct Flair {
}

// Part of flair, either emoji or text
-#[derive(Clone)]
+#[derive(Clone, Serialize)]
pub struct FlairPart {
	pub flair_part_type: String,
	pub value: String,
@@ -96,12 +100,14 @@ impl FlairPart {
	}
}

+#[derive(Serialize)]
pub struct Author {
	pub name: String,
	pub flair: Flair,
	pub distinguished: String,
}

+#[derive(Serialize)]
pub struct Poll {
	pub poll_options: Vec<PollOption>,
	pub voting_end_timestamp: (String, String),
@@ -129,6 +135,7 @@ impl Poll {
	}
}

+#[derive(Serialize)]
pub struct PollOption {
	pub id: u64,
	pub text: String,
@@ -158,13 +165,14 @@ impl PollOption {
}

// Post flags with nsfw and stickied
#[derive(Serialize)]
pub struct Flags {
	pub spoiler: bool,
	pub nsfw: bool,
	pub stickied: bool,
}

-#[derive(Debug)]
+#[derive(Debug, Serialize)]
pub struct Media {
	pub url: String,
	pub alt_url: String,
@@ -264,6 +272,7 @@ impl Media {
	}
}

+#[derive(Serialize)]
pub struct GalleryMedia {
	pub url: String,
	pub width: i64,
@@ -304,6 +313,7 @@ impl GalleryMedia {
}

// Post containing content, metadata and media
+#[derive(Serialize)]
pub struct Post {
	pub id: String,
	pub title: String,
@@ -470,7 +480,7 @@ pub struct Comment {
	pub prefs: Preferences,
}

-#[derive(Default, Clone)]
+#[derive(Default, Clone, Serialize)]
pub struct Award {
	pub name: String,
	pub icon_url: String,
@@ -484,6 +494,7 @@ impl std::fmt::Display for Award {
	}
}

+#[derive(Serialize)]
pub struct Awards(pub Vec<Award>);

impl std::ops::Deref for Awards {
@@ -602,6 +613,7 @@ pub struct Preferences {
	pub blur_nsfw: String,
	pub unblur_on_hover: String,
	pub hide_hls_notification: String,
+	pub video_quality: String,
	pub hide_sidebar_and_summary: String,
	pub use_hls: String,
	pub autoplay_videos: String,
@@ -643,6 +655,7 @@ impl Preferences {
			unblur_on_hover: setting(req, "unblur_on_hover"),
			use_hls: setting(req, "use_hls"),
			hide_hls_notification: setting(req, "hide_hls_notification"),
+			video_quality: setting(req, "video_quality"),
			autoplay_videos: setting(req, "autoplay_videos"),
			fixed_navbar: setting_or_default(req, "fixed_navbar", "on".to_string()),
			disable_visit_reddit_confirmation: setting(req, "disable_visit_reddit_confirmation"),
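
The recurring #[derive(Serialize)] additions above (Flair, FlairPart, Author, Poll, PollOption, Media, GalleryMedia, Post, Award, Awards) line up with the new scraper binary, which writes fetched posts straight out as JSON. A minimal sketch of that use, assuming the redlib::utils::Post exposed through the new src/lib.rs (dump_posts is a hypothetical helper, not part of the diff):

	use redlib::utils::Post;

	// Serializing a batch of posts only works because Post and its nested
	// types now derive serde::Serialize.
	fn dump_posts(posts: &[Post]) -> serde_json::Result<String> {
		serde_json::to_string(posts)
	}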

static/check_update.js (38 lines, new file)

@@ -0,0 +1,38 @@
async function checkInstanceUpdateStatus() {
	try {
		const response = await fetch('/commits.atom');
		const text = await response.text();
		const parser = new DOMParser();
		const xmlDoc = parser.parseFromString(text, "application/xml");
		const entries = xmlDoc.getElementsByTagName('entry');
		const localCommit = document.getElementById('git_commit').dataset.value;

		let statusMessage = '';

		if (entries.length > 0) {
			const commitHashes = Array.from(entries).map(entry => {
				const id = entry.getElementsByTagName('id')[0].textContent;
				return id.split('/').pop();
			});

			const commitIndex = commitHashes.indexOf(localCommit);

			if (commitIndex === 0) {
				statusMessage = '✅ Instance is up to date.';
			} else if (commitIndex > 0) {
				statusMessage = `⚠️ This instance is not up to date and is ${commitIndex} commits old. Test and confirm on an up-to-date instance before reporting.`;
			} else {
				statusMessage = `⚠️ This instance is not up to date and is at least ${commitHashes.length} commits old. Test and confirm on an up-to-date instance before reporting.`;
			}
		} else {
			statusMessage = '⚠️ Unable to fetch commit information.';
		}

		document.getElementById('update-status').innerText = statusMessage;
	} catch (error) {
		console.error('Error fetching commits:', error);
		document.getElementById('update-status').innerText = '⚠️ Error checking update status.';
	}
}

checkInstanceUpdateStatus();

@@ -1,5 +1,7 @@
// @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0
(function () {
+	const configElement = document.getElementById('video_quality');
+	const qualitySetting = configElement.getAttribute('data-value');
	if (Hls.isSupported()) {
		var videoSources = document.querySelectorAll("video source[type='application/vnd.apple.mpegurl']");
		videoSources.forEach(function (source) {
@@ -28,13 +30,26 @@

			oldVideo.parentNode.replaceChild(newVideo, oldVideo);

+			function getIndexOfDefault(length) {
+				switch (qualitySetting) {
+					case 'best':
+						return length - 1;
+					case 'medium':
+						return Math.floor(length / 2);
+					case 'worst':
+						return 0;
+					default:
+						return length - 1;
+				}
+			}
+
			function initializeHls() {
				newVideo.removeEventListener('play', initializeHls);
				var hls = new Hls({ autoStartLoad: false });
				hls.loadSource(playlist);
				hls.attachMedia(newVideo);
				hls.on(Hls.Events.MANIFEST_PARSED, function () {
-					hls.loadLevel = hls.levels.length - 1;
+					hls.loadLevel = getIndexOfDefault(hls.levels.length);
					var availableLevels = hls.levels.map(function(level) {
						return {
							height: level.height,
@@ -73,18 +88,18 @@
		function addQualitySelector(videoElement, hlsInstance, availableLevels) {
			var qualitySelector = document.createElement('select');
			qualitySelector.classList.add('quality-selector');
-			var last = availableLevels.length - 1;
+			var defaultIndex = getIndexOfDefault(availableLevels.length);
			availableLevels.forEach(function (level, index) {
				var option = document.createElement('option');
				option.value = index.toString();
				var bitrate = (level.bitrate / 1_000).toFixed(0);
				option.text = level.height + 'p (' + bitrate + ' kbps)';
-				if (index === last) {
+				if (index === defaultIndex) {
					option.selected = "selected";
				}
				qualitySelector.appendChild(option);
			});
-			qualitySelector.selectedIndex = availableLevels.length - 1;
+			qualitySelector.selectedIndex = defaultIndex;
			qualitySelector.addEventListener('change', function () {
				var selectedIndex = qualitySelector.selectedIndex;
				hlsInstance.nextLevel = selectedIndex;

@@ -27,6 +27,8 @@
	<link rel="manifest" type="application/json" href="/manifest.json">
	<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
	<link rel="stylesheet" type="text/css" href="/style.css?v={{ env!("CARGO_PKG_VERSION") }}">
+	<!-- Video quality -->
+	<div id="video_quality" data-value="{{ prefs.video_quality }}"></div>
	{% endblock %}
</head>
<body class="

@@ -6,10 +6,15 @@
<h1>{{ msg }}</h1>
<h3><a href="https://www.redditstatus.com/">Reddit Status</a></h3>
<br />
+<h3 id="update-status"></h3>
+<br>
+<div id="git_commit" data-value="{{ crate::instance_info::INSTANCE_INFO.git_commit }}"></div>
+<script src="/check_update.js"></script>
+
<h3>Expected something to work? <a
	href="https://github.com/redlib-org/redlib/issues/new?assignees=&labels=bug&projects=&template=bug_report.md&title=%F0%9F%90%9B+Bug+Report%3A+{{ msg }}">Report
	an issue</a></h3>
<br />
<h3>Head back <a href="/">home</a>?</h3>
</div>
{% endblock %}
{% endblock %}
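
Taken together, the error page appears to carry everything the update check needs: git_commit (now pub on InstanceInfo) is embedded as the data-value of the #git_commit div, the /check_update.js script served from main.rs reads it, fetches the /commits.atom proxy, and reports how far the instance is behind upstream.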

@@ -46,6 +46,12 @@
	</fieldset>
	<fieldset>
		<legend>Content</legend>
+		<div class="prefs-group">
+			<label for="video_quality">Video quality:</label>
+			<select name="video_quality" id="video_quality">
+				{% call utils::options(prefs.video_quality, ["best", "medium", "worst"], "best") %}
+			</select>
+		</div>
		<div class="prefs-group">
			<label for="post_sort" title="Applies only to subreddit feeds">Default subreddit post sort:</label>
			<select name="post_sort">
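
The video quality preference threads through these diffs as follows: "video_quality" is added to the PREFS list and to the Preferences struct, exposed on the settings page above as a best/medium/worst select, published by base.html as the data-value of #video_quality, and read back by the HLS player script in getIndexOfDefault() to pick both the initial playback level and the preselected entry of the quality selector.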