
Commit: gameloot.xml > lootscraper.xml

eikowagenknecht committed Mar 11, 2022
1 parent 4e8a9c6 commit 32c7b4b
Showing 4 changed files with 27 additions and 22 deletions.
README.md (8 changes: 4 additions & 4 deletions)
@@ -12,10 +12,10 @@ You can either run this script locally on your computer or in any environment ca

Just want the feeds? Sure. You can use the links below. They are updated every 20 minutes and contain offers for Amazon Prime (games and ingame loot) and Epic Games (games only). Currently the following feeds are available:

-- <https://feed.phenx.de/gameloot.xml>: Everything (Amazon Prime games and ingame loot)
-- <https://feed.phenx.de/gameloot_epic_game.xml>: Epic games only
-- <https://feed.phenx.de/gameloot_amazon_game.xml>: Amazon Prime games only
-- <https://feed.phenx.de/gameloot_amazon_loot.xml>: Amazon Prime ingame loot only
+- <https://feed.phenx.de/lootscraper.xml>: Everything (Amazon Prime games and ingame loot)
+- <https://feed.phenx.de/lootscraper_epic_game.xml>: Epic games only
+- <https://feed.phenx.de/lootscraper_amazon_game.xml>: Amazon Prime games only
+- <https://feed.phenx.de/lootscraper_amazon_loot.xml>: Amazon Prime ingame loot only

This is what it currently looks like in Feedly:

app/feed.py (27 changes: 16 additions & 11 deletions)
@@ -16,7 +16,7 @@

def generate_feed(
    offers: list[LootOffer],
-    out_file: Path,
+    feed_file_base: Path,
    author_name: str,
    author_mail: str,
    author_web: str,
@@ -33,6 +33,13 @@ def generate_feed(
    if len(offers) == 0:
        return

+    if source is not None and type is not None:
+        file = feed_file_base.with_stem(
+            f"{feed_file_base.stem}_{source.name.lower()}_{type.name.lower()}"
+        )
+    else:
+        file = feed_file_base
+
    feed_generator = FeedGenerator()
    latest_date: datetime = None
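
The added branch above builds the per-feed filename from the base file with `Path.with_stem`. A minimal standalone sketch of what it produces; the `Source`/`OfferType` members and their values below are stand-ins for illustration, not necessarily the repo's definitions:

```python
from enum import Enum
from pathlib import Path


class Source(Enum):
    AMAZON = "Amazon Prime"
    EPIC = "Epic Games"


class OfferType(Enum):
    GAME = "Games"
    LOOT = "Ingame loot"


feed_file_base = Path("data/lootscraper.xml")

# Same expression as in the new "if source is not None and type is not None" branch
file = feed_file_base.with_stem(
    f"{feed_file_base.stem}_{Source.AMAZON.name.lower()}_{OfferType.GAME.name.lower()}"
)
print(file)  # data/lootscraper_amazon_game.xml
```

Without a source/type (the combined feed), the `else` branch keeps `feed_file_base` itself, which is why the all-in-one feed stays at `lootscraper.xml`.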

@@ -100,11 +107,12 @@ def generate_feed(
    # XML
    feed_generator.language("en")
    # Atom Needed
-    feed_generator.id(feed_id_prefix + get_feed_id(out_file.name))
+    feed_id = get_feed_id(file.name) if file.name != feed_file_base.name else ""
+    feed_generator.id(feed_id_prefix + feed_id)
    feed_generator.title(get_feed_title(source, type))
    feed_generator.updated(latest_date)
    # Atom Recommended
-    feed_generator.link(rel="self", href=f"{feed_url_prefix}{out_file.name}")
+    feed_generator.link(rel="self", href=f"{feed_url_prefix}{file.name}")
    feed_generator.link(rel="alternate", href=feed_url_alternate)
    feed_generator.author(
        {
@@ -126,16 +134,13 @@ def generate_feed(
    # - Subtitle

    # Write the ATOM feed to a file
-    feed_generator.atom_file(filename=str(out_file), pretty=True)
+    feed_generator.atom_file(filename=str(file), pretty=True)


def get_feed_id(filename: str) -> str:
-    if filename == "gameloot.xml":
-        return ""
-    else:
-        # Use the part between "gameloot_" and ".xml" as the feed id
-        subfeed = filename.split("_", 1)[1][0:-4]
-        return subfeed
+    # Use the part between "<base_filename>_" and ".xml" as the feed id
+    subfeed = filename.split("_", 1)[1][0:-4]
+    return subfeed


def get_feed_title(source: Source | None, type: OfferType | None):
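
To see the new `get_feed_id` in action, here it is copied verbatim and exercised against the sub-feed filenames from the README. The base file is never passed in any more, because the caller substitutes `""` for it directly (splitting `"lootscraper.xml"` on `"_"` would yield no second part):

```python
def get_feed_id(filename: str) -> str:
    # Use the part between "<base_filename>_" and ".xml" as the feed id
    subfeed = filename.split("_", 1)[1][0:-4]
    return subfeed


print(get_feed_id("lootscraper_amazon_game.xml"))  # amazon_game
print(get_feed_id("lootscraper_epic_game.xml"))    # epic_game
print(get_feed_id("lootscraper_amazon_loot.xml"))  # amazon_loot
```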
@@ -144,7 +149,7 @@ def get_feed_title(source: Source | None, type: OfferType | None):

    title = "Free"
    if source is not None:
-        title += f" {source.value}"
+        title += " " + source.value
    if type is not None:
        match OfferType:
            case OfferType.GAME:
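
For readers who have not used feedgen, the calls touched in this file fit together roughly as below. This is a self-contained sketch with made-up values, not the repo's actual feed configuration:

```python
from feedgen.feed import FeedGenerator

fg = FeedGenerator()
fg.language("en")
# Atom: id, title and updated are the required feed elements
fg.id("https://example.com/example_amazon_game")
fg.title("Free Amazon Prime Games")
fg.updated("2022-03-11T00:00:00+00:00")
fg.link(rel="self", href="https://example.com/example_amazon_game.xml")
fg.link(rel="alternate", href="https://example.com/")
fg.author({"name": "Example Author", "email": "author@example.com"})

# One entry so the feed is not empty
fe = fg.add_entry()
fe.id("https://example.com/offers/1")
fe.title("Some free game")
fe.updated("2022-03-11T00:00:00+00:00")

fg.atom_file(filename="example_amazon_game.xml", pretty=True)
```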
config.default.ini (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
[common]
; Path is /data if a file /data/config.ini exists, otherwise the relative directory data
DatabaseFile = loot.db
-FeedFilePrefix = gameloot
+FeedFilePrefix = lootscraper
LogFile = lootscraper.log
; One of: DEBUG, INFO, WARNING, ERROR, CRITICAL
Loglevel = INFO
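
The renamed `FeedFilePrefix` is what `job()` (next file) turns into the base feed path. A rough standalone equivalent of that lookup, with `configparser` standing in for the repo's `Config` helper:

```python
import configparser
from pathlib import Path

ini_text = """
[common]
DatabaseFile = loot.db
FeedFilePrefix = lootscraper
LogFile = lootscraper.log
Loglevel = INFO
"""

config = configparser.ConfigParser()
config.read_string(ini_text)

data_path = Path("data")  # stand-in for Config.data_path()
feed_file_base = data_path / Path(config["common"]["FeedFilePrefix"] + ".xml")
print(feed_file_base)  # data/lootscraper.xml
```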
lootscraper.py (12 changes: 6 additions & 6 deletions)
@@ -151,6 +151,9 @@ def job() -> None:
    cfg_feed_id_prefix: str = Config.config()["feed"]["FeedIdPrefix"] # type: ignore

    if cfg_generate_feed:
+        feed_file_base = Config.data_path() / Path(
+            Config.config()["common"]["FeedFilePrefix"] + ".xml"
+        )
        # Generate and upload feeds split by source
        any_feed_changed = False
        for scraper_source in db_offers:
@@ -165,7 +168,7 @@ def job() -> None:
                old_hash = hash_file(feed_file)
                generate_feed(
                    offers=db_offers[scraper_source][scraper_type],
-                    out_file=feed_file,
+                    feed_file_base=feed_file_base,
                    author_name=cfg_author_name,
                    author_web=cfg_author_web,
                    author_mail=cfg_author_mail,
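
The surrounding `old_hash` bookkeeping (context that this commit does not change) presumably feeds `any_feed_changed`: hash the feed file before regenerating it, hash it again afterwards, and flag it as changed when the two differ. A rough sketch of that pattern, assuming `hash_file` is a plain content hash; its real implementation is not part of this diff:

```python
import hashlib
from pathlib import Path


def hash_file(file: Path) -> str:
    # Assumed shape of the helper: any stable digest of the file content will do.
    if not file.exists():
        return ""
    return hashlib.sha256(file.read_bytes()).hexdigest()


feed_file = Path("data/lootscraper_amazon_game.xml")
old_hash = hash_file(feed_file)
# ... generate_feed(...) rewrites feed_file here ...
new_hash = hash_file(feed_file)
any_feed_changed = new_hash != old_hash
```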
@@ -190,12 +193,9 @@ def job() -> None:
                all_offers.extend(db_offers[scraper_source][scraper_type])

        if any_feed_changed:
-            feed_file = Config.data_path() / Path(
-                Config.config()["common"]["FeedFilePrefix"] + ".xml"
-            )
            generate_feed(
                offers=all_offers,
-                out_file=feed_file,
+                feed_file_base=feed_file_base,
                author_name=cfg_author_name,
                author_web=cfg_author_web,
                author_mail=cfg_author_mail,
@@ -204,7 +204,7 @@ def job() -> None:
                feed_id_prefix=cfg_feed_id_prefix,
            )
            if cfg_upload:
-                upload_to_server(feed_file)
+                upload_to_server(feed_file_base)
            else:
                logging.info("Skipping upload, disabled")


0 comments on commit 32c7b4b
