Improve handling of wiki pages that no longer exist

parent e10c8ac1e5
commit d71b4956c8

2 changed files with 20 additions and 11 deletions
src/steam.rs (16 changed lines)

@@ -3,6 +3,8 @@ use std::{
     process::Command,
 };
 
+use itertools::Itertools;
+
 use crate::{
     manifest::{placeholder, Os},
     resource::ResourceFile,
@@ -47,7 +49,13 @@ impl SteamCache {
 
         let info = ProductInfo::fetch(app_ids)?;
         for app_id in app_ids {
-            let latest = SteamCacheEntry::parse_app(*app_id, &info)?;
+            let latest = match SteamCacheEntry::parse_app(*app_id, &info) {
+                Ok(x) => x,
+                Err(e) => {
+                    println!("Steam: {app_id} - failed");
+                    return Err(e);
+                }
+            };
             self.0.insert(
                 *app_id,
                 latest.unwrap_or_else(|| SteamCacheEntry {
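The hunk above swaps the `?` operator for an explicit `match` so the failing app ID is logged before the same error is returned. A minimal sketch of this log-then-propagate pattern, using a hypothetical `parse_number` helper rather than anything from this repository:

// Sketch only: `parse_number`/`parse_all` are made-up stand-ins for
// SteamCacheEntry::parse_app and its caller.
fn parse_number(raw: &str) -> Result<u32, std::num::ParseIntError> {
    raw.trim().parse()
}

fn parse_all(raw_values: &[&str]) -> Result<Vec<u32>, std::num::ParseIntError> {
    let mut out = Vec::new();
    for raw in raw_values {
        // Instead of `parse_number(raw)?`, match so the failing item is logged
        // before the identical error is propagated to the caller.
        let value = match parse_number(raw) {
            Ok(x) => x,
            Err(e) => {
                println!("parse: {raw} - failed");
                return Err(e);
            }
        };
        out.push(value);
    }
    Ok(out)
}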
@@ -210,7 +218,7 @@ struct ProductInfo {
 
 impl ProductInfo {
     fn fetch(app_ids: &[u32]) -> Result<ProductInfo, Error> {
-        println!("Steam batch: {:?} to {:?}", app_ids.first(), app_ids.last());
+        println!("Steam batch: {}", app_ids.iter().join(", "));
 
         let mut cmd = Command::new("python");
         cmd.arg(format!("{}/scripts/get-steam-app-info.py", REPO));
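This new log line is what requires the `use itertools::Itertools;` import added in the first hunk: `join` on an arbitrary iterator is provided by the `Itertools` extension trait, not the standard library. A standalone illustration (assuming `itertools` is already a dependency, as the import implies):

use itertools::Itertools;

fn main() {
    let app_ids: Vec<u32> = vec![10, 20, 30];
    // Prints "Steam batch: 10, 20, 30" -- the same shape as the new log line.
    println!("Steam batch: {}", app_ids.iter().join(", "));
}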
@@ -410,10 +418,8 @@ mod product_info {
 
 impl SteamCacheEntry {
     fn parse_app(app_id: u32, info: &ProductInfo) -> Result<Option<Self>, Error> {
-        println!("Steam: {}", app_id);
-
         let Some(app) = info.response.apps.get(&app_id.to_string()).cloned() else {
-            eprintln!("No results for Steam ID: {}", app_id);
+            eprintln!("Steam: {app_id} - no results");
             return Ok(None);
         };
 
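`parse_app` keeps returning `Result<Option<Self>, Error>`, so a missing app (`Ok(None)`) stays distinct from a failed lookup (`Err`), and the caller shown earlier can fall back to a default entry for the former while aborting on the latter. A minimal sketch of that shape with a hypothetical in-memory lookup:

use std::collections::HashMap;

// Hypothetical error type for illustration only.
#[derive(Debug)]
enum LookupError {
    BadKey,
}

// Ok(Some(v)): found; Ok(None): legitimately absent; Err(_): the lookup itself failed.
fn lookup(map: &HashMap<String, u32>, key: &str) -> Result<Option<u32>, LookupError> {
    if key.is_empty() {
        return Err(LookupError::BadKey);
    }
    let Some(value) = map.get(key).copied() else {
        eprintln!("lookup: {key} - no results");
        return Ok(None);
    };
    Ok(Some(value))
}

fn main() {
    let map = HashMap::from([("10".to_string(), 100)]);
    assert_eq!(lookup(&map, "10").unwrap(), Some(100));
    assert_eq!(lookup(&map, "20").unwrap(), None);
    assert!(lookup(&map, "").is_err());
}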
src/wiki.rs (15 changed lines)

@@ -269,7 +269,6 @@ impl WikiCache {
         from: Option<String>,
     ) -> Result<(), Error> {
         let mut i = 0;
-        let solo = titles.is_some();
         let titles: Vec<_> = titles.unwrap_or_else(|| {
             self.0
                 .iter()
@@ -295,15 +294,14 @@ impl WikiCache {
                     self.0.insert(title.to_string(), latest);
                 }
                 Err(Error::PageMissing) => {
-                    if solo {
-                        return Err(Error::PageMissing);
-                    }
-
                     // Couldn't find it by name, so try again by ID.
                     // This can happen for pages moved without leaving a redirect.
                     // (If they have a redirect, then the recent changes code takes care of it.)
                     let Some(new_title) = get_page_title(cached.page_id).await? else {
-                        return Err(Error::PageMissing);
+                        // Page no longer exists.
+                        println!(":: refresh: page no longer exists");
+                        self.0.remove(title);
+                        continue;
                     };
                     println!(
                         ":: refresh: page {} called '{}' renamed to '{}'",
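With the `solo` early-return gone, a page that has vanished is no longer a hard error: the stale entry is dropped from the cache and the refresh moves on. A minimal sketch of this let-else-plus-continue cleanup over a plain map, independent of the wiki client (the `resolve` helper is made up):

use std::collections::BTreeMap;

// Hypothetical stand-in for the remote title lookup: None means the page is gone.
fn resolve(id: u64) -> Option<String> {
    if id % 2 == 0 {
        Some(format!("page-{id}"))
    } else {
        None
    }
}

fn refresh(cache: &mut BTreeMap<String, u64>) {
    // Iterate over a snapshot so entries can be removed while looping.
    for (title, id) in cache.clone() {
        let Some(new_title) = resolve(id) else {
            // Page no longer exists: forget it and keep going.
            println!(":: refresh: page no longer exists");
            cache.remove(&title);
            continue;
        };
        println!(":: refresh: page {id} called '{title}' renamed to '{new_title}'");
    }
}

fn main() {
    let mut cache = BTreeMap::from([("old-a".to_string(), 2), ("old-b".to_string(), 3)]);
    refresh(&mut cache);
    assert!(cache.contains_key("old-a") && !cache.contains_key("old-b"));
}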
@@ -371,6 +369,11 @@ impl WikiCacheEntry {
             .get_query_api_json_all(&params)
             .await
             .map_err(|_| Error::PageMissing)?;
+
+        if res["error"]["code"].as_str() == Some("missingtitle") {
+            return Err(Error::PageMissing);
+        }
+
         out.page_id = res["parse"]["pageid"].as_u64().ok_or(Error::WikiData("parse.pageid"))?;
         let raw_wikitext = res["parse"]["wikitext"]["*"]
             .as_str()
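The added guard relies on how `serde_json::Value` indexing behaves: a missing key yields `Value::Null`, and `.as_str()` on `Null` is `None`, so the comparison is simply false for successful responses. A standalone sketch, assuming the API response is a `serde_json::Value` (the payloads below are made up):

use serde_json::{json, Value};

fn page_is_missing(res: &Value) -> bool {
    // Indexing a missing key returns Value::Null, and Null.as_str() is None,
    // so this is false whenever the API returned no error object.
    res["error"]["code"].as_str() == Some("missingtitle")
}

fn main() {
    let ok: Value = json!({ "parse": { "pageid": 123 } });
    let missing: Value = json!({ "error": { "code": "missingtitle" } });
    assert!(!page_is_missing(&ok));
    assert!(page_is_missing(&missing));
}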