Add mastodon posting, update deps, go async

Author: Sam W
Date: 2023-06-16 16:12:47 +01:00
parent fa1c07e059
commit ad118fbec9
8 changed files with 1872 additions and 972 deletions

Cargo.lock (generated, 2325 lines changed)
File diff suppressed because it is too large

Cargo.toml

@ -5,14 +5,12 @@ version = "0.2.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"]} reqwest = { version = "0.11", features = ["json", "multipart"]}
serde_json = "*" serde_json = "*"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
scraper = "*" scraper = "*"
rand = "*" rand = "*"
resvg = "*" resvg = "*"
tiny-skia = "*"
usvg = "*"
oauth1 = "*" oauth1 = "*"
clap = { version = "*", features = ["derive"] } clap = { version = "*", features = ["derive"] }
webbrowser = "*" webbrowser = "*"
@ -23,6 +21,9 @@ regex = "1.6.0"
image = "0.24.3" image = "0.24.3"
viuer = "0.6.1" viuer = "0.6.1"
url = { version = "2.3.1", features = ["serde"] } url = { version = "2.3.1", features = ["serde"] }
megalodon = { git = "https://github.com/wlcx/megalodon-rs.git" }
tokio = "*"
futures-util = "*"
[build-dependencies] [build-dependencies]
toml = "*" toml = "*"

flake.lock

@@ -2,15 +2,15 @@
   "nodes": {
     "devshell": {
       "inputs": {
-        "flake-utils": "flake-utils",
-        "nixpkgs": "nixpkgs"
+        "nixpkgs": "nixpkgs",
+        "systems": "systems"
       },
       "locked": {
-        "lastModified": 1660811669,
-        "narHash": "sha256-V6lmsaLNFz41myppL0yxglta92ijkSvpZ+XVygAh+bU=",
+        "lastModified": 1686680692,
+        "narHash": "sha256-SsLZz3TDleraAiJq4EkmdyewSyiv5g0LZYc6vaLZOMQ=",
         "owner": "numtide",
         "repo": "devshell",
-        "rev": "c2feacb46ee69949124c835419861143c4016fb5",
+        "rev": "fd6223370774dd9c33354e87a007004b5fd36442",
         "type": "github"
       },
       "original": {
@@ -20,27 +20,15 @@
       }
     },
     "flake-utils": {
+      "inputs": {
+        "systems": "systems_2"
+      },
       "locked": {
-        "lastModified": 1642700792,
-        "narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "846b2ae0fc4cc943637d3d1def4454213e203cba",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "flake-utils_2": {
-      "locked": {
-        "lastModified": 1656928814,
-        "narHash": "sha256-RIFfgBuKz6Hp89yRr7+NR5tzIAbn52h8vT6vXkYjZoM=",
+        "lastModified": 1681202837,
+        "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "7e2a3b3dfd9af950a856d66b0a7d01e3c18aa249",
+        "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
         "type": "github"
       },
       "original": {
@@ -54,11 +42,11 @@
         "nixpkgs": "nixpkgs_2"
       },
       "locked": {
-        "lastModified": 1659610603,
-        "narHash": "sha256-LYgASYSPYo7O71WfeUOaEUzYfzuXm8c8eavJcel+pfI=",
+        "lastModified": 1686572087,
+        "narHash": "sha256-jXTut7ZSYqLEgm/nTk7TuVL2ExahTip605bLINklAnQ=",
         "owner": "nix-community",
         "repo": "naersk",
-        "rev": "c6a45e4277fa58abd524681466d3450f896dc094",
+        "rev": "8507af04eb40c5520bd35d9ce6f9d2342cea5ad1",
         "type": "github"
       },
       "original": {
@@ -69,11 +57,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1643381941,
-        "narHash": "sha256-pHTwvnN4tTsEKkWlXQ8JMY423epos8wUOhthpwJjtpc=",
+        "lastModified": 1677383253,
+        "narHash": "sha256-UfpzWfSxkfXHnb4boXZNaKsAcUrZT9Hw+tao1oZxd08=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "5efc8ca954272c4376ac929f4c5ffefcc20551d5",
+        "rev": "9952d6bc395f5841262b006fbace8dd7e143b634",
         "type": "github"
       },
       "original": {
@@ -85,12 +73,11 @@
     },
     "nixpkgs_2": {
       "locked": {
-        "lastModified": 1661353537,
-        "narHash": "sha256-1E2IGPajOsrkR49mM5h55OtYnU0dGyre6gl60NXKITE=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "0e304ff0d9db453a4b230e9386418fd974d5804a",
-        "type": "github"
+        "lastModified": 1685789966,
+        "narHash": "sha256-pyqctu5Cq1jwymO3Os0/RNj5Nm3q5kmRCT24p7gtG70=",
+        "path": "/nix/store/hnkjxwx9zv2k0gkiznbpkrsvyrzaz6w1-source",
+        "rev": "4eaa9e3eb36386de0c6a268ba5da72cafc959619",
+        "type": "path"
       },
       "original": {
         "id": "nixpkgs",
@@ -99,12 +86,11 @@
     },
     "nixpkgs_3": {
       "locked": {
-        "lastModified": 1662907018,
-        "narHash": "sha256-rMPfDmY7zJzv/tJj+LComcGEa1UuwI67kpbz5WC6abE=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "17352e8995e1409636b0817a7f38d6314ccd73c4",
-        "type": "github"
+        "lastModified": 1685789966,
+        "narHash": "sha256-pyqctu5Cq1jwymO3Os0/RNj5Nm3q5kmRCT24p7gtG70=",
+        "path": "/nix/store/hnkjxwx9zv2k0gkiznbpkrsvyrzaz6w1-source",
+        "rev": "4eaa9e3eb36386de0c6a268ba5da72cafc959619",
+        "type": "path"
      },
       "original": {
         "id": "nixpkgs",
@@ -113,11 +99,11 @@
     },
     "nixpkgs_4": {
       "locked": {
-        "lastModified": 1659102345,
-        "narHash": "sha256-Vbzlz254EMZvn28BhpN8JOi5EuKqnHZ3ujFYgFcSGvk=",
+        "lastModified": 1681358109,
+        "narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "11b60e4f80d87794a2a4a8a256391b37c59a1ea7",
+        "rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
         "type": "github"
       },
       "original": {
@@ -138,15 +124,15 @@
     },
     "rust-overlay": {
       "inputs": {
-        "flake-utils": "flake-utils_2",
+        "flake-utils": "flake-utils",
         "nixpkgs": "nixpkgs_4"
       },
       "locked": {
-        "lastModified": 1662001050,
-        "narHash": "sha256-tduflWLNZ6C3Xz0eUHf5Cnnfl47Vgey2NUY5ZU9f/S4=",
+        "lastModified": 1686795910,
+        "narHash": "sha256-jDa40qRZ0GRQtP9EMZdf+uCbvzuLnJglTUI2JoHfWDc=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "6f27501ff78beb62728cb292daca846fcab96c9e",
+        "rev": "5c2b97c0a9bc5217fc3dfb1555aae0fb756d99f9",
         "type": "github"
       },
       "original": {
@@ -155,13 +141,61 @@
         "type": "github"
       }
     },
-    "utils": {
+    "systems": {
       "locked": {
-        "lastModified": 1637014545,
-        "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=",
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    },
+    "systems_2": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    },
+    "systems_3": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    },
+    "utils": {
+      "inputs": {
+        "systems": "systems_3"
+      },
+      "locked": {
+        "lastModified": 1685518550,
+        "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4",
+        "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
         "type": "github"
       },
       "original": {

flake.nix

@@ -21,28 +21,31 @@
         inherit system;
         overlays = [(import rust-overlay)];
       };
-      rust = pkgs.rust-bin.stable.latest.default;
+      rust = pkgs.rust-bin.stable.latest.default.override {
+        extensions = [ "rust-src" ];
+      };
 
       # Override naersk to use our chosen rust version from rust-overlay
       naersk-lib = naersk.lib.${system}.override {
        cargo = rust;
        rustc = rust;
      };
 
-    in rec {
-      packages.default = naersk-lib.buildPackage {
+      packig = naersk-lib.buildPackage {
         pname = "iso7010-a-day";
         root = ./.;
         buildInputs = [pkgs.openssl pkgs.pkgconfig];
       };
-      apps.default = utils.lib.mkApp {drv = packages.default;};
-      hydraJobs.build = packages.default;
+    in {
+      packages.default = packig;
+      apps.default = utils.lib.mkApp {drv = packig;};
+      hydraJobs.build = packig;
 
       # Provide a dev env with rust and rls
       devShells.default = let
         pkgs = import nixpkgs {
           inherit system;
-          overlays = [devshell.overlay];
+          overlays = [devshell.overlays.default];
         };
       in
         pkgs.devshell.mkShell {

src/main.rs

@@ -1,16 +1,23 @@
+use mastodon::authorize_fedi;
 use rand::seq::SliceRandom;
 use std::convert::TryInto;
 use std::io::Cursor;
 use tracing::{event, Level};
 
+mod mastodon;
 mod twitter;
 mod wiki;
 use clap::{Parser, Subcommand};
 use image::{DynamicImage, RgbaImage};
+use resvg::tiny_skia::{Paint, PathBuilder, Pixmap, PixmapPaint, Stroke};
+use resvg::usvg::TreeParsing;
+use resvg::usvg::{Options, Transform};
+use resvg::Tree;
 use std::borrow::Cow;
-use tiny_skia::{Paint, PathBuilder, Pixmap, PixmapPaint, Stroke, Transform};
 use twitter::*;
 use wiki::*;
+use crate::mastodon::toot;
+
 static APP_USER_AGENT: &str = concat!(
     "bot_",
     env!("CARGO_PKG_NAME"),
@@ -22,30 +29,31 @@ static APP_USER_AGENT: &str = concat!(
 // Render the raw SVG data to an image
 fn render_svg(data: &[u8], height: u32, with_border: bool) -> StdError<DynamicImage> {
-    let opt = usvg::Options::default();
-    let rtree = usvg::Tree::from_data(data, &opt.to_ref()).expect("couldn't parse");
-    let svg_size = rtree.svg_node().size;
+    let opt = Options::default();
+    let rtree = resvg::usvg::Tree::from_data(data, &opt).expect("couldn't parse");
+    let svg_size = rtree.size;
     // Work out how wide the pixmap of height `height` needs to be to entirely fit the SVG.
-    let pm_width = ((height as f64 / svg_size.height()) * svg_size.width()).ceil() as u32;
+    let scale_factor = height as f32 / svg_size.height();
+    let pm_width = (scale_factor * svg_size.width()).ceil() as u32;
     let mut pixmap = Pixmap::new(pm_width, height).ok_or("Error creating pixmap")?;
     // Render the svg into a pixmap.
-    resvg::render(&rtree, usvg::FitTo::Height(height), pixmap.as_mut())
-        .ok_or("Error rendering svg")?;
+    Tree::from_usvg(&rtree).render(
+        Transform::from_scale(scale_factor, scale_factor),
+        &mut pixmap.as_mut(),
+    );
     // Make a wider pixmap with a 16:9 AR and the same height. This is a blesséd ratio by twitter
     // and means we see the whole image nicely in the timeline with no truncation.
     let mut bigger_pixmap =
         Pixmap::new(height / 9 * 16, height).ok_or("Error creating bigger pixmap")?;
     // Then draw our freshly rendered SVG into the middle of the bigger pixmap.
-    bigger_pixmap
-        .draw_pixmap(
-            ((bigger_pixmap.width() - pm_width) / 2).try_into().unwrap(),
-            0,
-            pixmap.as_ref(),
-            &PixmapPaint::default(),
-            Transform::identity(),
-            None,
-        )
-        .ok_or("Error drawing onto bigger pixmap")?;
+    bigger_pixmap.draw_pixmap(
+        ((bigger_pixmap.width() - pm_width) / 2).try_into().unwrap(),
+        0,
+        pixmap.as_ref(),
+        &PixmapPaint::default(),
+        Transform::identity(),
+        None,
+    );
 
     let (w, h) = (bigger_pixmap.width(), bigger_pixmap.height());
     // Render a red border for debug purposes
     if with_border {
@@ -88,14 +96,16 @@ struct Cli {
 #[derive(Subcommand)]
 enum Commands {
     /// Authorize the twitter application to acccess a user's account
-    Authorize,
+    AuthorizeTwitter,
     /// Scrape images from the category on wikimedia commons
     ScrapeCategory,
     /// Scrape images from the iso7010 wikipedia page
     ScrapeWeb,
     /// List tweets from the authed user's timeline
     ListTweets,
-    /// Run the bot - scrape, pick a random entry and tweet it
+    /// Authorize against a pleroma server
+    AuthorizeFedi,
+    /// Run the bot - scrape, pick a random entry and toot it
     RunBot {
         #[clap(short, long, action)]
         dry_run: bool,
@@ -106,20 +116,22 @@ enum Commands {
     Whoami,
 }
 
-fn main() -> StdError<()> {
+#[tokio::main]
+async fn main() -> StdError<()> {
     tracing_subscriber::fmt::init();
     let cli = Cli::parse();
     match &cli.command {
-        Commands::Authorize => do_authorize(),
-        Commands::ScrapeCategory => do_scrape_category(),
-        Commands::ScrapeWeb => do_scrape_web(),
-        Commands::ListTweets => do_list_tweets(),
-        Commands::Whoami => do_whoami(),
-        Commands::RunBot { dry_run, target } => run_bot(*dry_run, target.to_owned()),
+        Commands::AuthorizeTwitter => do_authorize().await,
+        Commands::ScrapeCategory => do_scrape_category().await,
+        Commands::ScrapeWeb => do_scrape_web().await,
+        Commands::ListTweets => do_list_tweets().await,
+        Commands::Whoami => do_whoami().await,
+        Commands::AuthorizeFedi => authorize_fedi().await,
+        Commands::RunBot { dry_run, target } => run_bot(*dry_run, target.to_owned()).await,
     }
 }
 
-fn do_whoami() -> StdError<()> {
+async fn do_whoami() -> StdError<()> {
     let user_token = user_token_from_env();
 
     let user: serde_json::Value = twitter_api(
@@ -127,13 +139,15 @@ fn do_whoami() -> StdError<()> {
         Some(&user_token),
         APIAction::Get,
         &[],
-    )?
-    .json()?;
+    )
+    .await?
+    .json()
+    .await?;
     println!("User @{}, (id: {})", user["screen_name"], user["id"]);
     Ok(())
 }
 
-fn do_list_tweets() -> StdError<()> {
+async fn do_list_tweets() -> StdError<()> {
     let user_token = user_token_from_env();
 
     let user = twitter_api(
@@ -141,8 +155,10 @@ fn do_list_tweets() -> StdError<()> {
         Some(&user_token),
         APIAction::Get,
         &[],
-    )?
-    .json::<serde_json::Value>()?;
+    )
+    .await?
+    .json::<serde_json::Value>()
+    .await?;
     let id = user["id"].as_u64().unwrap().to_string();
 
     let mut timeline = vec![];
@@ -165,8 +181,10 @@ fn do_list_tweets() -> StdError<()> {
             Some(&user_token),
             APIAction::Get,
             &[],
-        )?
-        .json::<serde_json::Value>()?;
+        )
+        .await?
+        .json::<serde_json::Value>()
+        .await?;
         let chunk = timeline_chunk.as_array().unwrap().to_owned();
         event!(Level::INFO, count = chunk.len(), "Got tweets.");
         if chunk.is_empty() {
@@ -186,8 +204,9 @@ fn do_list_tweets() -> StdError<()> {
     Ok(())
 }
 
-fn do_scrape_category() -> StdError<()> {
-    let mut files = get_files_in_category("Category:ISO_7010_safety_signs_(vector_drawings)")?;
+async fn do_scrape_category() -> StdError<()> {
+    let mut files =
+        get_files_in_category("Category:ISO_7010_safety_signs_(vector_drawings)").await?;
     files.sort();
     for f in files {
         println!("{}", f);
@@ -196,8 +215,12 @@ fn do_scrape_category() -> StdError<()> {
     Ok(())
 }
 
-fn do_scrape_web() -> StdError<()> {
-    let mut files: Vec<_> = scrape_web()?.into_iter().map(|(_, file)| file).collect();
+async fn do_scrape_web() -> StdError<()> {
+    let mut files: Vec<_> = scrape_web()
+        .await?
+        .into_iter()
+        .map(|(_, file)| file)
+        .collect();
     files.sort();
     for f in files {
         println!("{}", f);
@@ -206,16 +229,16 @@ fn do_scrape_web() -> StdError<()> {
     Ok(())
 }
 
-fn get_client(headers: Option<reqwest::header::HeaderMap>) -> StdError<reqwest::blocking::Client> {
-    let mut c = reqwest::blocking::Client::builder().user_agent(APP_USER_AGENT);
+fn get_client(headers: Option<reqwest::header::HeaderMap>) -> StdError<reqwest::Client> {
+    let mut c = reqwest::Client::builder().user_agent(APP_USER_AGENT);
     if let Some(headers) = headers {
         c = c.default_headers(headers);
     }
     Ok(c.build()?)
 }
 
-fn run_bot(dry_run: bool, target: Option<String>) -> StdError<()> {
-    let all = scrape_web()?;
+async fn run_bot(dry_run: bool, target: Option<String>) -> StdError<()> {
+    let all = scrape_web().await?;
     let (title, filename) = if let Some(target) = target {
         all.iter()
             .find(|(title, _)| title.to_lowercase().contains(&target.to_lowercase()))
@@ -227,10 +250,10 @@ fn run_bot(dry_run: bool, target: Option<String>) -> StdError<()> {
     let client = get_client(None)?;
     event!(Level::INFO, "Fetching metadata...");
     // TODO: could crash, probably doesn't matter
-    let meta = get_file_metadata(&[filename.as_str()])?.remove(0);
+    let meta = get_file_metadata(&[filename.as_str()]).await?.remove(0);
     event!(Level::INFO, %meta, "Got metadata");
     event!(Level::INFO, url = meta.url.to_string(), "Fetching image");
-    let svg = client.get(meta.url).send()?.bytes()?;
+    let svg = client.get(meta.url).send().await?.bytes().await?;
 
     let text = format!(
         "{}\n\nImage source: {}\nAuthor: Wikimedia Commons user {}\n{}{}",
@@ -247,7 +270,7 @@ fn run_bot(dry_run: bool, target: Option<String>) -> StdError<()> {
         let img = render_svg(&svg, 1000, false)?;
         let mut buf = Cursor::new(Vec::new());
         img.write_to(&mut buf, image::ImageFormat::Png)?;
-        tweet(&text, Some(buf.into_inner().into()))?;
+        toot(&text, Some(buf.into_inner().into())).await?;
     } else {
         // Render the image smaller for output to terminal
         let img = render_svg(&svg, 128, true)?;

src/mastodon.rs (new file, 87 lines)

@@ -0,0 +1,87 @@
// Interface to mastodon (etc) instances
use std::borrow::Cow;
use megalodon::entities::{Attachment, UploadMedia};
use megalodon::generator;
use megalodon::megalodon::PostStatusInputOptions;
use megalodon::{self, megalodon::UploadMediaInputOptions};
use crate::{StdError, APP_USER_AGENT};
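
// Connection details come from the environment: the base URL of the instance
// to post to, and an access token for it (see authorize_fedi below).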
const FEDI_ACCESS_TOKEN_ENV_VAR: &str = "FEDI_ACCESS_TOKEN";
const FEDI_INSTANCE_ENV_VAR: &str = "FEDI_INSTANCE";
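
/// Post a status, with an optional PNG attachment, to the configured instance.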
pub async fn toot(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
    let client = megalodon::generator(
        megalodon::SNS::Pleroma,
        std::env::var(FEDI_INSTANCE_ENV_VAR)
            .unwrap_or_else(|_| panic!("{} env var not present", FEDI_INSTANCE_ENV_VAR))
            .into(),
        Some(
            std::env::var(FEDI_ACCESS_TOKEN_ENV_VAR)
                .unwrap_or_else(|_| panic!("{} env var not present", FEDI_ACCESS_TOKEN_ENV_VAR))
                .into(),
        ),
        Some(APP_USER_AGENT.into()),
    );
    let mut ops = PostStatusInputOptions::default();
    if let Some(img) = img {
        let media = client
            .upload_media_raw(
                // TODO: get better at lifetimes
                // Box::leak trades a small, one-off memory leak for the
                // 'static borrow wanted here; fine for a one-shot CLI run.
                Box::leak(Box::new(img)),
                Some(&UploadMediaInputOptions::default()),
            )
            .await?;
        // The instance may return the finished attachment or an async
        // placeholder; either way it carries the id we need to attach.
        ops.media_ids = Some(vec![match media.json {
            UploadMedia::Attachment(a) => a.id,
            UploadMedia::AsyncAttachment(a) => a.id,
        }]);
    }
    client.post_status(text.into(), Some(&ops)).await?;
    Ok(())
}
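
/// Register this app with the instance, walk the user through an interactive
/// OAuth authorization, and print the resulting access token.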
pub async fn authorize_fedi() -> StdError<()> {
    let url = std::env::var(FEDI_INSTANCE_ENV_VAR)
        .unwrap_or_else(|_| panic!("{} env var not present", FEDI_INSTANCE_ENV_VAR))
        .into();
    let client = generator(megalodon::SNS::Pleroma, url, None, None);
    let options = megalodon::megalodon::AppInputOptions {
        scopes: Some([String::from("read"), String::from("write")].to_vec()),
        ..Default::default()
    };
    // Out-of-band flow: the user opens the printed URL in a browser, approves
    // the app, and pastes the resulting code back in here.
    match client.register_app(String::from("iso7010"), &options).await {
        Ok(app_data) => {
            println!("{}", app_data.url.unwrap());
            println!("Enter code:");
            let mut code = String::new();
            std::io::stdin().read_line(&mut code).ok();
            match client
                .fetch_access_token(
                    app_data.client_id,
                    app_data.client_secret,
                    code.trim().to_string(),
                    megalodon::default::NO_REDIRECT.to_string(),
                )
                .await
            {
                Ok(token_data) => {
                    println!("token: {}", token_data.access_token);
                    if let Some(refresh) = token_data.refresh_token {
                        println!("refresh_token: {}", refresh);
                    }
                }
                Err(err) => {
                    panic!("{}", err);
                }
            }
        }
        Err(err) => {
            panic!("{}", err);
        }
    }
    Ok(())
}

src/twitter.rs

@@ -54,7 +54,7 @@ impl TryInto<reqwest::Url> for TwitterEndpoint {
 
 pub enum PostData<'a> {
     Empty,
-    Multipart(reqwest::blocking::multipart::Form),
+    Multipart(reqwest::multipart::Form),
     Data(&'a [(&'a str, Cow<'a, str>)]),
 }
 
@@ -88,12 +88,12 @@
 }
 // Make an authed twitter API request
 #[instrument(skip(user_token), fields(url=url.to_string()))]
-pub fn twitter_api<'a>(
+pub async fn twitter_api<'a>(
     url: reqwest::Url,
-    user_token: Option<&oauth1::Token>,
-    action: APIAction,
+    user_token: Option<&oauth1::Token<'a>>,
+    action: APIAction<'a>,
     extra_oauth_params: &[(&str, &str)],
-) -> StdError<reqwest::blocking::Response> {
+) -> StdError<reqwest::Response> {
     let consumer_token = oauth1::Token::new(
         std::env::var(APP_TOKEN_ENV_VAR)?,
         std::env::var(APP_SECRET_ENV_VAR)?,
@@ -142,19 +142,19 @@ pub fn twitter_api<'a>(
         APIAction::Post(PostData::Multipart(form)) => client.post(url).multipart(form),
     };
     event!(Level::INFO, "Sending request");
-    let res = req.send()?;
+    let res = req.send().await?;
     if !res.status().is_success() {
         return Err(format!(
             "Got non-200 response: status {}, {}",
             res.status(),
-            res.text()?
+            res.text().await?
         )
         .into());
     }
     Ok(res)
 }
 
-pub fn do_authorize() -> StdError<()> {
+pub async fn do_authorize() -> StdError<()> {
     println!("Authorizing you lol!");
 
     // Oauth1 leg 1
@@ -163,8 +163,10 @@ pub fn do_authorize() -> StdError<()> {
         None,
         APIAction::Post(PostData::Empty),
         &[("oauth_callback", CB_URL)],
-    )?
-    .text()?;
+    )
+    .await?
+    .text()
+    .await?;
 
     let returned_params: HashMap<&str, &str> = res
         .split('&')
@@ -209,8 +211,10 @@ pub fn do_authorize() -> StdError<()> {
             Cow::Owned(oauth_verifier),
         )])),
         &[("oauth_token", returned_params["oauth_token"])],
-    )?
-    .text()?;
+    )
+    .await?
+    .text()
+    .await?;
     let returned_params: HashMap<&str, &str> = res
         .split('&')
         .map(|s| s.split('=').collect_tuple())
@@ -229,20 +233,24 @@ pub fn twitter_api<'a>(
     Ok(())
 }
 
-fn upload_image(user_token: &oauth1::Token, img: Cow<'static, [u8]>) -> StdError<u64> {
-    let form = reqwest::blocking::multipart::Form::new()
-        .part("media", reqwest::blocking::multipart::Part::bytes(img));
+async fn upload_image<'a>(
+    user_token: &oauth1::Token<'a>,
+    img: Cow<'static, [u8]>,
+) -> StdError<u64> {
+    let form = reqwest::multipart::Form::new().part("media", reqwest::multipart::Part::bytes(img));
     let res: serde_json::Value = twitter_api(
         "https://upload.twitter.com/1.1/media/upload.json".try_into()?,
         Some(user_token),
         APIAction::Post(PostData::Multipart(form)),
         &[],
-    )?
-    .json()?;
+    )
+    .await?
+    .json()
+    .await?;
     Ok(res["media_id"].as_u64().ok_or("media_id not u64!")?)
 }
 
-pub fn tweet(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
+pub async fn tweet(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
     let user_token = oauth1::Token::new(
         std::env::var(USER_TOKEN_ENV_VAR)?,
         std::env::var(USER_SECRET_ENV_VAR)?,
@@ -253,8 +261,10 @@ pub fn tweet(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
         Some(&user_token),
         APIAction::Get,
         &[],
-    )?
-    .json()?;
+    )
+    .await?
+    .json()
+    .await?;
     println!(
         "Tweeting for user @{}, (id: {})",
         user["screen_name"], user["id"]
@@ -262,7 +272,7 @@ pub fn tweet(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
     let mut post_data = vec![("status", Cow::Borrowed(text))];
     if let Some(img) = img {
         println!("Uploading image...");
-        let img_id = upload_image(&user_token, img)?;
+        let img_id = upload_image(&user_token, img).await?;
         post_data.push(("media_ids", Cow::Owned(img_id.to_string())))
     }
     event!(Level::INFO, "Sending tweet...");
@@ -271,6 +281,7 @@ pub fn tweet(text: &str, img: Option<Cow<'static, [u8]>>) -> StdError<()> {
         Some(&user_token),
         APIAction::Post(PostData::Data(&post_data[0..])),
         &[],
-    )?;
+    )
+    .await?;
     Ok(())
 }

src/wiki.rs

@@ -14,7 +14,7 @@ fn extract_filename(filename: &str) -> Option<&str> {
 }
 
 // Scrape all images from the wikipedia page, returning a vec of title, filename pairs
-pub fn scrape_web() -> StdError<Vec<(String, String)>> {
+pub async fn scrape_web() -> StdError<Vec<(String, String)>> {
     event!(Level::INFO, "Scraping the wikipedia page for things");
     // Parse CSS selectors to scrape elements
     let gallerybox_sel =
@@ -27,8 +27,10 @@ pub fn scrape_web() -> StdError<Vec<(String, String)>> {
     event!(Level::INFO, "Fetching wiki page");
     let txt = client
         .get("https://en.wikipedia.org/wiki/ISO_7010")
-        .send()?
-        .text()?;
+        .send()
+        .await?
+        .text()
+        .await?;
     let page = scraper::Html::parse_document(txt.as_str());
     return Ok(page
         .select(&gallerybox_sel)
@@ -62,7 +64,7 @@ pub fn wiki_query_url(params: Vec<(&str, &str)>) -> StdError<Url> {
 
 // https://commons.wikimedia.org/w/api.php?action=query&format=json&list=categorymembers&cmtitle=Category:ISO_7010_safety_signs_(vector_drawings)&cmlimit=2
 #[instrument]
-pub fn get_files_in_category(category: &str) -> StdError<Vec<String>> {
+pub async fn get_files_in_category(category: &str) -> StdError<Vec<String>> {
     let client = get_client(None)?;
     let url = wiki_query_url(
         [
@@ -73,7 +75,12 @@ pub fn get_files_in_category(category: &str) -> StdError<Vec<String>> {
         ]
         .into(),
     )?;
-    let data = client.get(url).send()?.json::<serde_json::Value>()?;
+    let data = client
+        .get(url)
+        .send()
+        .await?
+        .json::<serde_json::Value>()
+        .await?;
     if data.get("continue").is_some() {
         // There are more results than are contained in one response, so now you need to implement
         // pagination. Have fun!
@@ -147,49 +154,54 @@ struct ExtMetaItem<T> {
     value: T,
 }
 
-pub fn get_file_metadata(files: &[&str]) -> StdError<Vec<FileMeta>> {
+pub async fn get_file_metadata(files: &[&str]) -> StdError<Vec<FileMeta>> {
     let client = get_client(None)?;
     // Api only lets us do 50 files in one request
-    Ok(files
-        .chunks(50)
-        .flat_map(|files_chunk| {
-            let url = wiki_query_url(
-                [
-                    ("titles", files_chunk.join("|").as_ref()),
-                    ("prop", "imageinfo"),
-                    (
-                        "iiprop",
-                        "timestamp|url|size|mime|mediatype|extmetadata|user",
-                    ),
-                    // Get metadata for as many revisions of the file as we are allowed. We're unlikely to encounter a file with >500 revisions.
-                    ("iilimit", "500"),
-                    (
-                        "iiextmetadatafilter",
-                        "ObjectName|LicenseShortName|AttributionRequired|LicenseUrl",
-                    ),
-                ]
-                .into(),
-            )
-            .unwrap();
-
-            let data = client.get(url).send().unwrap().json::<Query>().unwrap();
-            data.query
-                .pages
-                .values()
-                .map(|page| {
-                    let latest = page.imageinfo.first().unwrap();
-                    let oldest = page.imageinfo.last().unwrap();
-                    FileMeta {
-                        url: latest.url.clone(),
-                        name: latest.extmetadata.object_name.value.clone(),
-                        html_url: latest.descriptionurl.clone(),
-                        author: oldest.user.clone(),
-                        license_short_name: latest.extmetadata.license_short_name.value.clone(),
-                        license_url: latest.extmetadata.license_url.clone().map(|i| i.value),
-                        attribution_required: latest.extmetadata.attribution_required.value.clone(),
-                    }
-                })
-                .collect::<Vec<_>>()
-        })
-        .collect())
+    let urls = files.chunks(50).map(|files_chunk| {
+        wiki_query_url(
+            [
+                ("titles", files_chunk.join("|").as_ref()),
+                ("prop", "imageinfo"),
+                (
+                    "iiprop",
+                    "timestamp|url|size|mime|mediatype|extmetadata|user",
+                ),
+                // Get metadata for as many revisions of the file as we are allowed. We're unlikely to encounter a file with >500 revisions.
+                ("iilimit", "500"),
+                (
+                    "iiextmetadatafilter",
+                    "ObjectName|LicenseShortName|AttributionRequired|LicenseUrl",
+                ),
+            ]
+            .into(),
+        )
+        .unwrap()
+    });
+
+    let mut meta = Vec::new();
+    for u in urls {
+        let data = client
+            .get(u)
+            .send()
+            .await
+            .unwrap()
+            .json::<Query>()
+            .await
+            .unwrap();
+
+        meta.extend(data.query.pages.values().map(|page| {
+            let latest = page.imageinfo.first().unwrap();
+            let oldest = page.imageinfo.last().unwrap();
+            FileMeta {
+                url: latest.url.clone(),
+                name: latest.extmetadata.object_name.value.clone(),
+                html_url: latest.descriptionurl.clone(),
+                author: oldest.user.clone(),
+                license_short_name: latest.extmetadata.license_short_name.value.clone(),
+                license_url: latest.extmetadata.license_url.clone().map(|i| i.value),
+                attribution_required: latest.extmetadata.attribution_required.value.clone(),
+            }
+        }))
+    }
+    Ok(meta)
 }