Skip to content

Commit

Permalink
Merge pull request #115 from m-appel/114-add-expire_after-to-cachedsession
Browse files Browse the repository at this point in the history

Add expire_after parameter to CachedSession
  • Loading branch information
romain-fontugne authored Jan 25, 2024
2 parents 98f9701 + 1037db9 commit 7147d3e
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 7 deletions.
5 changes: 5 additions & 0 deletions config.json.example
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
{
"cache": {
"directory": "tmp/",
"duration_in_days": 6
},

"peeringdb": {
"apikey": ""
},
Expand Down
12 changes: 10 additions & 2 deletions iyp/crawlers/peeringdb/fac.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import logging
import os
import sys
from datetime import timedelta

import flatdict
import iso3166
Expand All @@ -24,16 +25,23 @@
FACID_LABEL = 'PeeringdbFacID'

# Defaults: anonymous access and no response caching. Both are overridden
# below when a config.json is present next to the working directory.
API_KEY = ''
CACHE_DIR = ''
CACHE_DURATION = requests_cache.DO_NOT_CACHE
if os.path.exists('config.json'):
    # Context manager closes the file (the old open() call leaked the handle).
    with open('config.json', 'r') as f:
        config = json.load(f)
    API_KEY = config['peeringdb']['apikey']
    CACHE_DIR = config['cache']['directory']
    CACHE_DURATION = timedelta(days=config['cache']['duration_in_days'])
    # Inside the if: deleting it unconditionally would raise NameError
    # when config.json does not exist.
    del config  # Do not leave as a global variable.


class Crawler(BaseCrawler):
def __init__(self, organization, url, name):
"""Initialisation for pushing peeringDB facilities to IYP."""

self.headers = {'Authorization': 'Api-Key ' + API_KEY}
self.requests = requests_cache.CachedSession(ORG)
self.requests = requests_cache.CachedSession(os.path.join(CACHE_DIR, ORG), expire_after=CACHE_DURATION)

super().__init__(organization, url, name)

Expand Down
13 changes: 10 additions & 3 deletions iyp/crawlers/peeringdb/ix.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import logging
import os
import sys
from datetime import datetime, time, timezone
from datetime import datetime, time, timedelta, timezone

import flatdict
import requests_cache
Expand Down Expand Up @@ -34,8 +34,15 @@
FACID_LABEL = 'PeeringdbFacID'

# Defaults: anonymous access and no response caching. Both are overridden
# below when a config.json is present next to the working directory.
API_KEY = ''
CACHE_DIR = ''
CACHE_DURATION = requests_cache.DO_NOT_CACHE
if os.path.exists('config.json'):
    # Context manager closes the file (the old open() call leaked the handle).
    with open('config.json', 'r') as f:
        config = json.load(f)
    API_KEY = config['peeringdb']['apikey']
    CACHE_DIR = config['cache']['directory']
    CACHE_DURATION = timedelta(days=config['cache']['duration_in_days'])
    # Inside the if: deleting it unconditionally would raise NameError
    # when config.json does not exist.
    del config  # Do not leave as a global variable.


def handle_social_media(d: dict, website_set: set = None):
Expand Down Expand Up @@ -82,7 +89,7 @@ def __init__(self, organization, url, name):
self.nets = {}

# Using cached queries
self.requests = requests_cache.CachedSession(ORG)
self.requests = requests_cache.CachedSession(os.path.join(CACHE_DIR, ORG), expire_after=CACHE_DURATION)

# connection to IYP database
super().__init__(organization, url, name)
Expand Down
12 changes: 10 additions & 2 deletions iyp/crawlers/peeringdb/org.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import logging
import os
import sys
from datetime import timedelta

import flatdict
import iso3166
Expand All @@ -21,16 +22,23 @@
ORGID_LABEL = 'PeeringdbOrgID'

# Defaults: anonymous access and no response caching. Both are overridden
# below when a config.json is present next to the working directory.
API_KEY = ''
CACHE_DIR = ''
CACHE_DURATION = requests_cache.DO_NOT_CACHE
if os.path.exists('config.json'):
    # Context manager closes the file (the old open() call leaked the handle).
    with open('config.json', 'r') as f:
        config = json.load(f)
    API_KEY = config['peeringdb']['apikey']
    CACHE_DIR = config['cache']['directory']
    CACHE_DURATION = timedelta(days=config['cache']['duration_in_days'])
    # Inside the if: deleting it unconditionally would raise NameError
    # when config.json does not exist.
    del config  # Do not leave as a global variable.


class Crawler(BaseCrawler):
def __init__(self, organization, url, name):
"""Initialisation for pushing peeringDB organizations to IYP."""

self.headers = {'Authorization': 'Api-Key ' + API_KEY}
self.requests = requests_cache.CachedSession(ORG)
self.requests = requests_cache.CachedSession(os.path.join(CACHE_DIR, ORG), expire_after=CACHE_DURATION)

super().__init__(organization, url, name)

Expand Down

0 comments on commit 7147d3e

Please sign in to comment.