Improve handling of wiki pages that no longer exist
parent e10c8ac1e5
commit d71b4956c8
2 changed files with 20 additions and 11 deletions

src/steam.rs (16 lines changed)
@@ -3,6 +3,8 @@ use std::{
     process::Command,
 };
 
+use itertools::Itertools;
+
 use crate::{
     manifest::{placeholder, Os},
     resource::ResourceFile,
@@ -47,7 +49,13 @@ impl SteamCache {
 
         let info = ProductInfo::fetch(app_ids)?;
         for app_id in app_ids {
-            let latest = SteamCacheEntry::parse_app(*app_id, &info)?;
+            let latest = match SteamCacheEntry::parse_app(*app_id, &info) {
+                Ok(x) => x,
+                Err(e) => {
+                    println!("Steam: {app_id} - failed");
+                    return Err(e);
+                }
+            };
             self.0.insert(
                 *app_id,
                 latest.unwrap_or_else(|| SteamCacheEntry {
@@ -210,7 +218,7 @@ struct ProductInfo {
 
 impl ProductInfo {
     fn fetch(app_ids: &[u32]) -> Result<ProductInfo, Error> {
-        println!("Steam batch: {:?} to {:?}", app_ids.first(), app_ids.last());
+        println!("Steam batch: {}", app_ids.iter().join(", "));
 
         let mut cmd = Command::new("python");
         cmd.arg(format!("{}/scripts/get-steam-app-info.py", REPO));
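A side note on the batch logging change in ProductInfo::fetch above: Itertools::join formats every element via Display and concatenates with the separator, so the log now lists every app ID in the batch instead of only the first and last. A minimal, self-contained sketch of the same call (the only assumption is the itertools dependency, which the diff adds):

// Sketch of the logging change: list every ID in the batch
// (requires the `itertools` crate).
use itertools::Itertools;

fn main() {
    let app_ids: Vec<u32> = vec![220, 440, 570];

    // Old style: only the ends of the batch are visible.
    println!("Steam batch: {:?} to {:?}", app_ids.first(), app_ids.last());

    // New style: every ID in the batch is listed.
    println!("Steam batch: {}", app_ids.iter().join(", "));
}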
@@ -410,10 +418,8 @@ mod product_info {
 
 impl SteamCacheEntry {
     fn parse_app(app_id: u32, info: &ProductInfo) -> Result<Option<Self>, Error> {
-        println!("Steam: {}", app_id);
-
         let Some(app) = info.response.apps.get(&app_id.to_string()).cloned() else {
-            eprintln!("No results for Steam ID: {}", app_id);
+            eprintln!("Steam: {app_id} - no results");
             return Ok(None);
         };
 
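The SteamCache change above swaps the `?` operator for an explicit `match` so that the failing app ID is logged before the error is propagated. A rough, self-contained sketch of that pattern; the `parse` helper and `Error` type here are stand-ins for illustration, not the project's real types:

// Sketch of "log which item failed, then propagate the error".
#[derive(Debug)]
struct Error(String);

fn parse(id: u32) -> Result<u32, Error> {
    if id % 2 == 0 {
        Ok(id * 10)
    } else {
        Err(Error(format!("bad id {id}")))
    }
}

fn process(ids: &[u32]) -> Result<(), Error> {
    for id in ids {
        // Equivalent to `let value = parse(*id)?;`, but with a log line
        // identifying the item before the early return.
        let value = match parse(*id) {
            Ok(x) => x,
            Err(e) => {
                println!("item {id} - failed");
                return Err(e);
            }
        };
        println!("item {id} -> {value}");
    }
    Ok(())
}

fn main() {
    let _ = process(&[2, 4, 5, 6]);
}

The same effect could also be had with `.map_err(|e| { println!("item {id} - failed"); e })?`; the explicit `match` just keeps the logging step visible at the call site.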

src/wiki.rs (15 lines changed)
@@ -269,7 +269,6 @@ impl WikiCache {
         from: Option<String>,
     ) -> Result<(), Error> {
         let mut i = 0;
-        let solo = titles.is_some();
         let titles: Vec<_> = titles.unwrap_or_else(|| {
             self.0
                 .iter()
@@ -295,15 +294,14 @@ impl WikiCache {
                     self.0.insert(title.to_string(), latest);
                 }
                 Err(Error::PageMissing) => {
-                    if solo {
-                        return Err(Error::PageMissing);
-                    }
-
                     // Couldn't find it by name, so try again by ID.
                     // This can happen for pages moved without leaving a redirect.
                     // (If they have a redirect, then the recent changes code takes care of it.)
                     let Some(new_title) = get_page_title(cached.page_id).await? else {
-                        return Err(Error::PageMissing);
+                        // Page no longer exists.
+                        println!(":: refresh: page no longer exists");
+                        self.0.remove(title);
+                        continue;
                     };
                     println!(
                         ":: refresh: page {} called '{}' renamed to '{}'",
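The hunk above changes what happens when a cached title can no longer be found on the wiki: rather than returning Error::PageMissing (which previously aborted a single-title refresh), the stale entry is dropped from the cache and the loop moves on to the next title. A rough, self-contained sketch of that flow, with a fake synchronous `lookup` standing in for the MediaWiki calls and without the real code's retry-by-page-ID step:

// Sketch: drop cache entries whose pages no longer exist and keep going,
// instead of failing the whole refresh run.
use std::collections::BTreeMap;

fn lookup(title: &str) -> Option<String> {
    // Pretend only "Cave Story" still exists on the wiki.
    (title == "Cave Story").then(|| format!("content of {title}"))
}

fn refresh(cache: &mut BTreeMap<String, String>) {
    // Collect the titles first so the map can be mutated inside the loop.
    let titles: Vec<String> = cache.keys().cloned().collect();
    for title in titles {
        match lookup(&title) {
            Some(latest) => {
                cache.insert(title, latest);
            }
            None => {
                println!(":: refresh: page no longer exists: {title}");
                cache.remove(&title);
                continue;
            }
        }
    }
}

fn main() {
    let mut cache = BTreeMap::new();
    cache.insert("Cave Story".to_string(), String::new());
    cache.insert("Deleted Page".to_string(), String::new());
    refresh(&mut cache);
    assert_eq!(cache.len(), 1);
}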
@@ -371,6 +369,11 @@ impl WikiCacheEntry {
             .get_query_api_json_all(&params)
             .await
             .map_err(|_| Error::PageMissing)?;
+
+        if res["error"]["code"].as_str() == Some("missingtitle") {
+            return Err(Error::PageMissing);
+        }
+
         out.page_id = res["parse"]["pageid"].as_u64().ok_or(Error::WikiData("parse.pageid"))?;
         let raw_wikitext = res["parse"]["wikitext"]["*"]
             .as_str()
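The last hunk adds an explicit check of the MediaWiki error payload: when action=parse is asked about a nonexistent page, the request itself succeeds and the API returns an error object whose code is "missingtitle", so the condition has to be detected in the response body rather than as a transport failure. A small sketch of the same check against a hand-written serde_json value (the JSON literal here is a stand-in for the real API reply):

// Sketch of detecting a missing page in a MediaWiki `action=parse` response.
use serde_json::json;

fn main() {
    let res = json!({
        "error": {
            "code": "missingtitle",
            "info": "The page you specified doesn't exist."
        }
    });

    // Same check as the new code: a missing page arrives as a normal
    // response carrying an error object.
    if res["error"]["code"].as_str() == Some("missingtitle") {
        println!("page is missing");
    }
}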