Format and fix clippy warnings

Sam W 2022-09-13 19:14:04 +01:00
parent 73ae1a8613
commit 49eef28056
2 changed files with 20 additions and 18 deletions
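
The clippy fixes below are mechanical: vec!() becomes vec![], a len() == 0 comparison becomes is_empty() (clippy's len_zero lint), and get_file_metadata now borrows a &[&str] slice instead of taking a Vec<&str> by value; the remaining churn is rustfmt reflowing long expressions. As a minimal standalone sketch of the before/after idioms (a hypothetical function, not code from this repo):

    // Before: vec!() with parens, a len() == 0 comparison (clippy::len_zero),
    // and a Vec<&str> parameter that takes ownership it never needs.
    fn nonempty_old(items: Vec<&str>) -> Vec<String> {
        let mut out = vec!();
        for s in items {
            if !(s.len() == 0) {
                out.push(s.to_string());
            }
        }
        out
    }

    // After: borrow a slice, call is_empty(), and use the bracketed vec![] form.
    fn nonempty(items: &[&str]) -> Vec<String> {
        let mut out = vec![];
        for s in items {
            if !s.is_empty() {
                out.push(s.to_string());
            }
        }
        out
    }

The slice parameter is the same shape as the run_bot call-site change below, where vec![filename] becomes &[filename.as_str()]: callers can pass a borrowed array literal instead of allocating a Vec per call.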

@@ -6,10 +6,10 @@ mod twitter;
 mod wiki;
 use clap::{Parser, Subcommand};
 use image::{DynamicImage, RgbaImage};
+use std::borrow::Cow;
 use tiny_skia::{Paint, PathBuilder, Pixmap, PixmapPaint, Stroke, Transform};
 use twitter::*;
 use wiki::*;
-use std::borrow::Cow;
 
 static APP_USER_AGENT: &str = concat!(
     "bot_",
@@ -145,37 +145,38 @@ fn do_list_tweets() -> StdError<()> {
         .json::<serde_json::Value>()?;
     let id = user["id"].as_u64().unwrap().to_string();
-    let mut timeline = vec!();
+    let mut timeline = vec![];
     let mut last_id: Option<u64> = None;
     loop {
         event!(Level::INFO, last_id, "Fetching chunk of tweets...");
-        let mut params: Vec<(&str, Cow<str>)> = vec!(
+        let mut params: Vec<(&str, Cow<str>)> = vec![
             ("count", "200".into()),
             ("exclude_replies", "true".into()),
             ("include_rts", "false".into()),
             ("trim_user", "true".into()),
             ("user_id", Cow::Borrowed(&id)),
-        );
+        ];
         if let Some(since_id) = last_id {
             // Get next chunk of tweets before those we've fetched
             params.push(("max_id", (since_id - 1).to_string().into()))
         }
         let timeline_chunk = twitter_api(
-            reqwest::Url::parse_with_params(
-                &TwitterEndpoint::UserTimeline.to_string(),
-                params,
-            )?,
+            reqwest::Url::parse_with_params(&TwitterEndpoint::UserTimeline.to_string(), params)?,
             Some(&user_token),
             APIAction::Get,
             &[],
         )?
         .json::<serde_json::Value>()?;
         let chunk = timeline_chunk.as_array().unwrap().to_owned();
-        event!(Level::INFO, count=chunk.len(), "Got tweets.");
-        if chunk.len() == 0 {
+        event!(Level::INFO, count = chunk.len(), "Got tweets.");
+        if chunk.is_empty() {
             break;
         }
-        last_id = Some(chunk.last().unwrap().as_object().unwrap()["id"].as_u64().unwrap());
+        last_id = Some(
+            chunk.last().unwrap().as_object().unwrap()["id"]
+                .as_u64()
+                .unwrap(),
+        );
         timeline.extend(chunk);
     }
     for tweet in timeline {
@@ -216,16 +217,17 @@ fn get_client(headers: Option<reqwest::header::HeaderMap>) -> StdError<reqwest::
 fn run_bot(dry_run: bool, target: Option<String>) -> StdError<()> {
     let all = scrape_web()?;
     let (title, filename) = if let Some(target) = target {
-        all.iter().find(|(title, _)| title.to_lowercase().contains(&target.to_lowercase()))
+        all.iter()
+            .find(|(title, _)| title.to_lowercase().contains(&target.to_lowercase()))
     } else {
-        all
-            .choose(&mut rand::thread_rng())
-    }.ok_or("got no images m8")?;
+        all.choose(&mut rand::thread_rng())
+    }
+    .ok_or("got no images m8")?;
     event!(Level::INFO, title, filename, "Picked random thing");
     let client = get_client(None)?;
     event!(Level::INFO, "Fetching metadata...");
     // TODO: could crash, probably doesn't matter
-    let meta = get_file_metadata(vec![filename])?.remove(0);
+    let meta = get_file_metadata(&[filename.as_str()])?.remove(0);
     event!(Level::INFO, %meta, "Got metadata");
     event!(Level::INFO, url = meta.url.to_string(), "Fetching image");
     let svg = client.get(meta.url).send()?.bytes()?;

@@ -156,7 +156,7 @@ struct ExtMetaItem<T> {
     value: T,
 }
 
-pub fn get_file_metadata(files: Vec<&str>) -> StdError<Vec<FileMeta>> {
+pub fn get_file_metadata(files: &[&str]) -> StdError<Vec<FileMeta>> {
     let client = get_client(None)?;
     // Api only lets us do 50 files in one request
     Ok(files