Merge branch 'Seb35-issue-4'
commit 014fe9fec5
.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
+__pycache__/
README.md (10 changes)
@@ -19,6 +19,8 @@ You need Python 3 and [Requests](http://python-requests.org/). You can install R
 Then, copy the script `check_expire` and the file `ianardap.py` to the directory of local plugins.
 
+The use of a cache directory is not mandatory, but highly recommended. It can be either `$XDG_CACHE_HOME/ianardap` or `$HOME/.ianardapcaches`, depending on which environment variables are set.
+
 ## Icinga configuration
 
 If you use Icinga, here is a possible definition of the command:
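As an aside, here is a minimal sketch, not part of the patch, of the lookup order that paragraph describes: `XDG_CACHE_HOME` wins, then `HOME`, and with neither set no cache directory is used at all (the helper name `cache_dir` is purely illustrative).

```
import os

def cache_dir():
    # Mirror the order described in the README and implemented by CACHE in ianardap.py.
    if "XDG_CACHE_HOME" in os.environ:
        return os.environ["XDG_CACHE_HOME"] + "/ianardap"
    if "HOME" in os.environ:
        return os.environ["HOME"] + "/.ianardapcaches"
    return None  # no cache: the plugin still works but warns about it

print(cache_dir())
```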

@@ -35,6 +37,7 @@ object CheckCommand "expiration" {
     "-v" = { set_if = "$expiration_verbose$" }
   }
+  env.XDG_CACHE_HOME = "/var/cache/nagios"
 }
 
 apply Service "expiration" {

@@ -60,6 +63,13 @@ object Host "bortzmeyer-org" {
 }
 ```
 
+If needed, create the cache directory accordingly:
+
+```
+mkdir /var/cache/nagios
+chown nagios: /var/cache/nagios
+```
+
 ## Zabbix configuration
 
 For monitoring systems that do not rely on exit codes but on a calculation mechanism based on the metric they receive, you can use the `-u` option, which only returns the expiration date in Unix time format.
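As an illustration of the Zabbix-style usage, here is a small sketch, not part of the patch, of how a consumer might turn the Unix timestamp printed by `-u` into a number of days until expiration; the timestamp below is only an example value.

```
import datetime

expiration_unixtime = 1735689600  # example value, as printed with the -u option
remaining = datetime.datetime.fromtimestamp(expiration_unixtime) - datetime.datetime.now()
print("%.1f days until expiration" % (remaining.total_seconds() / 86400))
```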

@@ -53,6 +53,8 @@ def usage(msg=None):
     print("")
 
 def details():
+    if database and not database.cachefile:
+        print(" (WARNING: no cache directory used, please set environment variable XDG_CACHE_HOME or HOME.)", end="")
     if verbose:
         print(" RDAP database \"%s\", version %s published on %s, retrieved on %s, RDAP server is %s" % \
               (database.description, database.version, database.publication, database.retrieved, server))
ianardap.py (57 changes)
@@ -21,7 +21,8 @@ IANABASES = {"domains": "https://data.iana.org/rdap/dns.json",
              "v6prefixes": "https://data.iana.org/rdap/ipv6.json",
              "as": "https://data.iana.org/rdap/asn.json",
              "objects": "https://data.iana.org/rdap/object-tags.json"}
-CACHE = os.environ["HOME"] + "/.ianardapcaches"
+CACHE = os.environ["XDG_CACHE_HOME"] + "/ianardap" if "XDG_CACHE_HOME" in os.environ else \
+        (os.environ["HOME"] + "/.ianardapcaches" if "HOME" in os.environ else None)
 MAXAGE = 24 # Hours. Used only if the server no longer gives the information.
 IANATIMEOUT = 10 # Seconds
 MAXTESTS = 3 # Maximum attempts to get the database

@@ -60,28 +61,33 @@ file (see the documentation of the module).
         """
 
         cache_valid = False
-        if not os.path.exists(cachedir):
-            os.mkdir(cachedir)
         self.category = category
-        cachefile = os.path.join(cachedir, category)
-        if pickleformat:
-            self.cachefile = cachefile + ".pickle"
-        else:
-            self.cachefile = cachefile + ".json"
-        self.lockname = self.cachefile + ".lock"
-        self.expirationfile = self.cachefile + ".expires"
+        self.cachefile = None
+        self.lockname = None
+        self.expirationfile = None
+        if cachedir:
+            if not os.path.exists(cachedir):
+                os.mkdir(cachedir)
+            cachefile = os.path.join(cachedir, category)
+            if pickleformat:
+                self.cachefile = cachefile + ".pickle"
+            else:
+                self.cachefile = cachefile + ".json"
+            self.lockname = self.cachefile + ".lock"
+            self.expirationfile = self.cachefile + ".expires"
         if maxage is not None:
-            with open(self.expirationfile, 'w'):
-                self.expirationtime = time.mktime((datetime.datetime.now() + \
-                                                   datetime.timedelta(hours=maxage)).timetuple())
-                os.utime(self.expirationfile,
-                         times = (self.expirationtime, self.expirationtime))
+            self.expirationtime = time.mktime((datetime.datetime.now() + \
+                                               datetime.timedelta(hours=maxage)).timetuple())
+            if self.expirationfile:
+                with open(self.expirationfile, 'w'):
+                    os.utime(self.expirationfile,
+                             times = (self.expirationtime, self.expirationtime))
         loaded = False
         tests = 0
         errmsg = "No error"
         while not loaded and tests < MAXTESTS:
             self.lock()
-            if os.path.exists(self.cachefile) and \
+            if self.cachefile and os.path.exists(self.cachefile) and \
                (pathlib.Path(self.expirationfile).exists() and \
                 datetime.datetime.fromtimestamp(os.path.getmtime(self.expirationfile)) > \
                 datetime.datetime.now()):
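A hedged usage sketch of what the reworked `__init__` permits, assuming `cachedir` is a keyword parameter of `IanaRDAPDatabase` (the hunk only shows it being used inside the constructor):

```
import ianardap

# With no cache directory, cachefile/lockname/expirationfile stay None and the
# IANA database is fetched over the network instead of being read from disk.
db = ianardap.IanaRDAPDatabase(cachedir=None)  # 'cachedir=None' is an assumption
print(db.cachefile)  # -> None
```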

@@ -139,9 +145,10 @@ file (see the documentation of the module).
             try:
                 content = response.content
                 database = json.loads(content)
-                with open(self.expirationfile, 'w'):
-                    os.utime(self.expirationfile,
-                             times = (self.expirationtime, self.expirationtime))
+                if self.expirationfile:
+                    with open(self.expirationfile, 'w'):
+                        os.utime(self.expirationfile,
+                                 times = (self.expirationtime, self.expirationtime))
             except json.decoder.JSONDecodeError:
                 tests += 1
                 errmsg = "Invalid JSON retrieved from %s" % IANABASE

@@ -173,7 +180,7 @@ file (see the documentation of the module).
         else: # IP addresses will be complicated, because of the
               # longest prefix rule.
             raise Exception("Unsupported category %s" % self.category)
-        if not cache_valid:
+        if self.cachefile and not cache_valid:
             self.lock()
             cache = open(self.cachefile, "wb")
             if pickleformat:

@@ -184,12 +191,14 @@ file (see the documentation of the module).
             self.unlock()
 
     def lock(self):
-        self.lockhandle = open(self.lockname, 'w')
-        fcntl.lockf(self.lockhandle, fcntl.LOCK_EX)
+        if self.lockname:
+            self.lockhandle = open(self.lockname, 'w')
+            fcntl.lockf(self.lockhandle, fcntl.LOCK_EX)
 
     def unlock(self):
-        fcntl.lockf(self.lockhandle, fcntl.LOCK_UN)
-        self.lockhandle.close()
+        if self.lockname:
+            fcntl.lockf(self.lockhandle, fcntl.LOCK_UN)
+            self.lockhandle.close()
 
     def find(self, id):
         """Get the RDAP server(s), as an array, for a given identifier. None
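For reference, a minimal self-contained sketch of the advisory-locking pattern that `lock()` and `unlock()` wrap (and now skip entirely when no lock file name is set); the lock path below is hypothetical.

```
import fcntl

lockname = "/tmp/ianardap-demo.lock"  # hypothetical path, for illustration only
handle = open(lockname, "w")
fcntl.lockf(handle, fcntl.LOCK_EX)    # block until the exclusive lock is held
try:
    pass  # read or rewrite the cache file while holding the lock
finally:
    fcntl.lockf(handle, fcntl.LOCK_UN)
    handle.close()
```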

@@ -29,8 +29,9 @@ def test_refresh():
     # Force a refresh
     database = ianardap.IanaRDAPDatabase(maxage=0)
     assert (database.retrieved > (datetime.datetime.now() - datetime.timedelta(minutes=1))) and \
+        ((not database.cachefile) or \
         (datetime.datetime.fromtimestamp(os.path.getmtime(database.cachefile)) > \
-         (datetime.datetime.now() - datetime.timedelta(minutes=1)))
+         (datetime.datetime.now() - datetime.timedelta(minutes=1))))
 
 def test_find_exists():
     database = ianardap.IanaRDAPDatabase()