Write exif metadata (Quick & Dirty)

Stefal 2023-09-10 21:02:33 +02:00
parent a02ca67360
commit c79ba8a65d
15 changed files with 3771 additions and 4 deletions

2 .gitignore vendored

@@ -1 +1,3 @@
data/*
lib/__pycache__/
lib/test


@@ -4,6 +4,7 @@ import os
import asyncio
import argparse
from datetime import datetime
from lib.exif_write import ExifEdit

def parse_args(argv=None):
    parser = argparse.ArgumentParser()
@@ -18,12 +19,48 @@ def background(f):
        return asyncio.get_event_loop().run_in_executor(None, f, *args, **kwargs)
    return wrapped

#@background
def download(url, fn, metadata=None):
    r = requests.get(url, stream=True)
    with open(str(fn), "wb") as f:
        f.write(r.content)
    write_exif(fn, metadata)
def write_exif(filename, data):
    '''
    Write exif metadata
    '''
    #{'thumb_original_url': 'https://scontent-cdg4-2.xx.fbcdn.net/m1/v/t6/An9Zy2SrH9vXJIF01QkBODyUbg7XSKfwL48UwHyvihSwvECGjVbG0vSw9uhxe2-Dq-k2eUcigb83buO6zo-7eVbykfp5aQIe1kgd-MJr66nU_H-o_mwBLZXgVbj5I_5WX-C9c6FxJruHkV962F228O0?ccb=10-5&oh=00_AfDOKD869DxL-4ZNCbVo8Rn29vsc0JyjMAU2ctx4aAFVMQ&oe=65256C25&_nc_sid=201bca',
    # 'captured_at': 1603459736644, 'geometry': {'type': 'Point', 'coordinates': [2.5174596904057, 48.777089857534]}, 'id': '485924785946693'}
    lat = data['geometry']['coordinates'][1]
    long = data['geometry']['coordinates'][0]
    altitude = data['altitude']
    bearing = data['compass_angle']
    timestamp = datetime.utcfromtimestamp(int(data['captured_at'])/1000)
    metadata = ExifEdit(filename)
    #metadata.read()
    try:
        # add to exif
        #metadata["Exif.GPSInfo.GPSLatitude"] = exiv_lat
        #metadata["Exif.GPSInfo.GPSLatitudeRef"] = coordinates[3]
        #metadata["Exif.GPSInfo.GPSLongitude"] = exiv_lon
        #metadata["Exif.GPSInfo.GPSLongitudeRef"] = coordinates[7]
        #metadata["Exif.GPSInfo.GPSMapDatum"] = "WGS-84"
        #metadata["Exif.GPSInfo.GPSVersionID"] = '2 0 0 0'
        #metadata["Exif.GPSInfo.GPSImgDirection"] = exiv_bearing
        #metadata["Exif.GPSInfo.GPSImgDirectionRef"] = "T"
        metadata.add_lat_lon(lat, long)
        metadata.add_altitude(altitude)
        metadata.add_date_time_original(timestamp)
        metadata.add_direction(bearing)
        metadata.write()
        print("Added geodata to: {0}".format(filename))
    except ValueError as e:
        print("Skipping {0}: {1}".format(filename, e))
if __name__ == '__main__':
    parse_args()
@@ -62,10 +99,10 @@ if __name__ == '__main__':
        r = requests.get(req_url, headers=header)
        data = r.json()
        print('getting url {} of {}'.format(x, img_num))
        #print(data['geometry']['coordinates'][1], data['geometry']['coordinates'][0])
        urls.append(data)

    print('downloading.. this process will take a while. please wait')
    for i,url in enumerate(urls):
        path = 'data/{}/{}.jpg'.format(sequence_id, datetime.utcfromtimestamp(int(url['captured_at'])/1000).strftime('%Y-%m-%d_%HH%Mmn%S.%f'))
        download(url['thumb_original_url'],path, url)
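
For reference, a minimal stand-alone sketch of how the new write_exif() helper is fed; the item dict mirrors one entry of urls from the loop above, and the file path, URL, altitude and compass_angle values are illustrative assumptions rather than values taken from this diff:

# hypothetical example, not part of the commit
item = {
    'thumb_original_url': 'https://example.com/img.jpg',   # placeholder URL
    'captured_at': 1603459736644,                          # milliseconds since epoch, UTC
    'geometry': {'type': 'Point', 'coordinates': [2.5174596904057, 48.777089857534]},
    'altitude': 35.2,                                      # assumed to be returned by the API
    'compass_angle': 147.0,                                # assumed to be returned by the API
}
write_exif('data/example_sequence/2020-10-23_13H28mn56.644000.jpg', item)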

17 lib/__init__.py Normal file

@@ -0,0 +1,17 @@
#from .geo import *
#from .exif_aux import *
#from .exif_read import *
#from .exif_write import *
#from .gps_parser import *
#from .gpmf import *
#import geo
#import exif_aux
#import exif_read
#import exif_write
#import gps_parser
#import gpmf
VERSION = "0.0.2"

385 lib/exif.py Normal file

@@ -0,0 +1,385 @@
#!/usr/bin/env python
import os
import sys
import exifread
import datetime
from lib.geo import normalize_bearing
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
def eval_frac(value):
return float(value.num) / float(value.den)
def exif_gps_fields():
'''
GPS fields in EXIF
'''
return [
["GPS GPSLongitude", "EXIF GPS GPSLongitude"],
["GPS GPSLatitude", "EXIF GPS GPSLatitude"]
]
def exif_datetime_fields():
'''
Date time fields in EXIF
'''
return [["EXIF DateTimeOriginal",
"Image DateTimeOriginal",
"EXIF DateTimeDigitized",
"Image DateTimeDigitized",
"EXIF DateTime"
"Image DateTime",
"GPS GPSDate",
"EXIF GPS GPSDate",
"EXIF DateTimeModified"]]
def format_time(time_string):
'''
Format time string with invalid time elements in hours/minutes/seconds
Format for the timestring needs to be "%Y_%m_%d_%H_%M_%S"
e.g. 2014_03_31_24_10_11 => 2014_04_01_00_10_11
'''
data = time_string.split("_")
hours, minutes, seconds = int(data[3]), int(data[4]), int(data[5])
date = datetime.datetime.strptime("_".join(data[:3]), "%Y_%m_%d")
date_time = date + datetime.timedelta(hours=hours, minutes=minutes, seconds=seconds)
return date_time
def gps_to_decimal(values, reference):
sign = 1 if reference in 'NE' else -1
degrees = eval_frac(values[0])
minutes = eval_frac(values[1])
seconds = eval_frac(values[2])
return sign * (degrees + minutes / 60 + seconds / 3600)
def get_float_tag(tags, key):
if key in tags:
return float(tags[key].values[0])
else:
return None
def get_frac_tag(tags, key):
if key in tags:
return eval_frac(tags[key].values[0])
else:
return None
def extract_exif_from_file(fileobj):
if isinstance(fileobj, str):
with open(fileobj) as f:
exif_data = EXIF(f)
else:
exif_data = EXIF(fileobj)
d = exif_data.extract_exif()
return d
def required_fields():
return exif_gps_fields() + exif_datetime_fields()
def verify_exif(filename):
'''
Check that image file has the required EXIF fields.
Incompatible files will be ignored server side.
'''
# required tags in IFD name convention
required_exif = required_fields()
exif = EXIF(filename)
required_exif_exist = exif.fields_exist(required_exif)
return required_exif_exist
def verify_mapillary_tag(filename):
'''
Check that image file has the required Mapillary tag
'''
return EXIF(filename).mapillary_tag_exists()
def is_image(filename):
return filename.lower().endswith(('jpg', 'jpeg', 'png', 'tif', 'tiff', 'pgm', 'pnm', 'gif'))
class EXIF:
'''
EXIF class for reading exif from an image
'''
def __init__(self, filename, details=False):
'''
Initialize EXIF object with FILE as filename or fileobj
'''
self.filename = filename
if type(filename) == str:
with open(filename, 'rb') as fileobj:
self.tags = exifread.process_file(fileobj, details=details)
else:
self.tags = exifread.process_file(filename, details=details)
def _extract_alternative_fields(self, fields, default=None, field_type=float):
'''
Extract a value for a list of ordered fields.
Return the value of the first existed field in the list
'''
for field in fields:
if field in self.tags:
if field_type is float:
value = eval_frac(self.tags[field].values[0])
if field_type is str:
value = str(self.tags[field].values)
if field_type is int:
value = int(self.tags[field].values[0])
return value, field
return default, None
def exif_name(self):
'''
Name of file in the form {lat}_{lon}_{ca}_{datetime}_{filename}
'''
lon, lat = self.extract_lon_lat()
ca = self.extract_direction()
if ca is None:
ca = 0
ca = int(ca)
date_time = self.extract_capture_time()
date_time = date_time.strftime("%Y-%m-%d-%H-%M-%S-%f")
date_time = date_time[:-3]
filename = '{}_{}_{}_{}_{}'.format(lat, lon, ca, date_time, os.path.basename(self.filename))
return filename
def extract_altitude(self):
'''
Extract altitude
'''
fields = ['GPS GPSAltitude', 'EXIF GPS GPSAltitude']
altitude, _ = self._extract_alternative_fields(fields, 0, float)
return altitude
def extract_capture_time(self):
'''
Extract capture time from EXIF
return a datetime object
TODO: handle GPS DateTime
'''
time_string = exif_datetime_fields()[0]
capture_time, time_field = self._extract_alternative_fields(time_string, 0, str)
# if "GPSDate" in time_field:
# return self.extract_gps_time()
if capture_time == 0:
# try interpret the filename
try:
capture_time = datetime.datetime.strptime(os.path.basename(self.filename)[:-4]+'000', '%Y_%m_%d_%H_%M_%S_%f')
except:
pass
else:
capture_time = capture_time.replace(" ", "_")
capture_time = capture_time.replace(":", "_")
capture_time = "_".join(["{0:02d}".format(int(ts)) for ts in capture_time.split("_") if ts.isdigit()])
capture_time = format_time(capture_time)
sub_sec = self.extract_subsec()
capture_time = capture_time + datetime.timedelta(seconds=float(sub_sec)/10**len(str(sub_sec)))
return capture_time
def extract_direction(self):
'''
Extract image direction (i.e. compass, heading, bearing)
'''
fields = ['GPS GPSImgDirection',
'EXIF GPS GPSImgDirection',
'GPS GPSTrack',
'EXIF GPS GPSTrack']
direction, _ = self._extract_alternative_fields(fields)
if direction is not None:
direction = normalize_bearing(direction, check_hex=True)
return direction
def extract_dop(self):
'''
Extract dilution of precision
'''
fields = ['GPS GPSDOP', 'EXIF GPS GPSDOP']
dop, _ = self._extract_alternative_fields(fields)
return dop
def extract_geo(self):
'''
Extract geo-related information from exif
'''
altitude = self.extract_altitude()
dop = self.extract_dop()
lon, lat = self.extract_lon_lat()
d = {}
if lon is not None and lat is not None:
d['latitude'] = lat
d['longitude'] = lon
if altitude is not None:
d['altitude'] = altitude
if dop is not None:
d['dop'] = dop
return d
def extract_gps_time(self):
'''
Extract timestamp from GPS field.
'''
gps_date_field = "GPS GPSDate"
gps_time_field = "GPS GPSTimeStamp"
gps_time = 0
if gps_date_field in self.tags and gps_time_field in self.tags:
date = str(self.tags[gps_date_field].values).split(":")
t = self.tags[gps_time_field]
gps_time = datetime.datetime(
year=int(date[0]),
month=int(date[1]),
day=int(date[2]),
hour=int(eval_frac(t.values[0])),
minute=int(eval_frac(t.values[1])),
second=int(eval_frac(t.values[2])),
)
microseconds = datetime.timedelta(microseconds=int( (eval_frac(t.values[2])%1) *1e6))
gps_time += microseconds
return gps_time
def extract_exif(self):
'''
Extract a list of exif infos
'''
width, height = self.extract_image_size()
make, model = self.extract_make(), self.extract_model()
orientation = self.extract_orientation()
geo = self.extract_geo()
capture = self.extract_capture_time()
direction = self.extract_direction()
d = {
'width': width,
'height': height,
'orientation': orientation,
'direction': direction,
'make': make,
'model': model,
'capture_time': capture
}
d['gps'] = geo
return d
def extract_image_size(self):
'''
Extract image height and width
'''
width, _ = self._extract_alternative_fields(['Image ImageWidth', 'EXIF ExifImageWidth'], -1, int)
height, _ = self._extract_alternative_fields(['Image ImageLength', 'EXIF ExifImageLength'], -1, int)
return width, height
def extract_image_description(self):
'''
Extract image description
'''
description, _ = self._extract_alternative_fields(['Image ImageDescription'], "{}", str)
return description
def extract_lon_lat(self):
if 'GPS GPSLatitude' in self.tags and 'GPS GPSLongitude' in self.tags:
lat = gps_to_decimal(self.tags['GPS GPSLatitude'].values,
self.tags['GPS GPSLatitudeRef'].values)
lon = gps_to_decimal(self.tags['GPS GPSLongitude'].values,
self.tags['GPS GPSLongitudeRef'].values)
elif 'EXIF GPS GPSLatitude' in self.tags and 'EXIF GPS GPSLongitude' in self.tags:
lat = gps_to_decimal(self.tags['EXIF GPS GPSLatitude'].values,
self.tags['EXIF GPS GPSLatitudeRef'].values)
lon = gps_to_decimal(self.tags['EXIF GPS GPSLongitude'].values,
self.tags['EXIF GPS GPSLongitudeRef'].values)
else:
lon, lat = None, None
return lon, lat
def extract_make(self):
'''
Extract camera make
'''
fields = ['EXIF LensMake', 'Image Make']
make, _ = self._extract_alternative_fields(fields, default='none', field_type=str)
return make
def extract_model(self):
'''
Extract camera model
'''
fields = ['EXIF LensModel', 'Image Model']
model, _ = self._extract_alternative_fields(fields, default='none', field_type=str)
return model
def extract_orientation(self):
'''
Extract image orientation
'''
fields = ['Image Orientation']
orientation, _ = self._extract_alternative_fields(fields, default=1, field_type=int)
if orientation not in [1, 3, 6, 8]:
return 1
return orientation
def extract_subsec(self):
'''
Extract microseconds
'''
fields = [
'Image SubSecTimeOriginal',
'EXIF SubSecTimeOriginal',
'Image SubSecTimeDigitized',
'EXIF SubSecTimeDigitized',
'Image SubSecTime',
'EXIF SubSecTime'
]
sub_sec, _ = self._extract_alternative_fields(fields, default=0, field_type=str)
sub_sec = int(sub_sec)
return sub_sec
def fields_exist(self, fields):
'''
Check existence of a list fields in exif
'''
for rexif in fields:
vflag = False
for subrexif in rexif:
if subrexif in self.tags:
vflag = True
if not vflag:
print("Missing required EXIF tag: {0} for image {1}".format(rexif[0], self.filename))
return False
return True
def mapillary_tag_exists(self):
'''
Check existence of Mapillary tag
'''
description_tag = "Image ImageDescription"
if description_tag in self.tags:
if "MAPSequenceUUID" in self.tags[description_tag].values:
return True
return False
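
For orientation, a small sketch of reading tags back with this class (the image path is hypothetical):

from lib.exif import EXIF

exif = EXIF('data/sample.jpg')      # hypothetical JPEG with GPS tags
info = exif.extract_exif()          # width/height, make/model, capture_time, direction, gps
print(info['capture_time'], info['gps'].get('latitude'), info['gps'].get('longitude'))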

227 lib/exif_pil.py Normal file

@@ -0,0 +1,227 @@
import datetime
import struct # Only to catch struct.error due to error in PIL / Pillow.
from PIL import Image
from PIL.ExifTags import TAGS, GPSTAGS
# Original: https://gist.github.com/erans/983821
# License: MIT
# Credits: https://gist.github.com/erans
class ExifException(Exception):
def __init__(self, message):
self._message = message
def __str__(self):
return self._message
class PILExifReader:
def __init__(self, filepath):
self._filepath = filepath
image = Image.open(filepath)
self._exif = self.get_exif_data(image)
image.close()
def get_exif_data(self, image):
"""Returns a dictionary from the exif data of an PIL Image
item. Also converts the GPS Tags"""
exif_data = {}
try:
info = image._getexif()
except OverflowError as e:
if e.message == "cannot fit 'long' into an index-sized integer":
# Error in PIL when exif data is corrupt.
return None
else:
raise e
except struct.error as e:
if e.message == "unpack requires a string argument of length 2":
# Error in PIL when exif data is corrupt.
return None
else:
raise e
if info:
for tag, value in info.items():
decoded = TAGS.get(tag, tag)
if decoded == "GPSInfo":
gps_data = {}
for t in value:
sub_decoded = GPSTAGS.get(t, t)
gps_data[sub_decoded] = value[t]
exif_data[decoded] = gps_data
else:
exif_data[decoded] = value
return exif_data
def read_capture_time(self):
time_tag = "DateTimeOriginal"
# read and format capture time
if self._exif == None:
print "Exif is none."
if time_tag in self._exif:
capture_time = self._exif[time_tag]
capture_time = capture_time.replace(" ","_")
capture_time = capture_time.replace(":","_")
else:
print "No time tag in "+self._filepath
capture_time = 0
# return as datetime object
return datetime.datetime.strptime(capture_time, '%Y_%m_%d_%H_%M_%S')
def _get_if_exist(self, data, key):
if key in data:
return data[key]
else:
return None
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in
the EXIF to degrees in float format."""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
def get_lat_lon(self):
"""Returns the latitude and longitude, if available, from the
provided exif_data (obtained through get_exif_data above)."""
lat = None
lon = None
gps_info = self.get_gps_info()
if gps_info is None:
return None
gps_latitude = self._get_if_exist(gps_info, "GPSLatitude")
gps_latitude_ref = self._get_if_exist(gps_info, 'GPSLatitudeRef')
gps_longitude = self._get_if_exist(gps_info, 'GPSLongitude')
gps_longitude_ref = self._get_if_exist(gps_info, 'GPSLongitudeRef')
if (gps_latitude and gps_latitude_ref
and gps_longitude and gps_longitude_ref):
lat = self._convert_to_degress(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = self._convert_to_degress(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
if isinstance(lat, float) and isinstance(lon, float):
return lat, lon
else:
return None
def calc_tuple(self, tup):
if tup is None or len(tup) != 2 or tup[1] == 0:
return None
return int(tup[0]) / int(tup[1])
def get_gps_info(self):
if self._exif is None or not "GPSInfo" in self._exif:
return None
else:
return self._exif["GPSInfo"]
def get_rotation(self):
"""Returns the direction of the GPS receiver in degrees."""
gps_info = self.get_gps_info()
if gps_info is None:
return None
for tag in ('GPSImgDirection', 'GPSTrack'):
gps_direction = self._get_if_exist(gps_info, tag)
direction = self.calc_tuple(gps_direction)
if direction == None:
continue
else:
return direction
return None
def get_speed(self):
"""Returns the GPS speed in km/h or None if it does not exists."""
gps_info = self.get_gps_info()
if gps_info is None:
return None
if not "GPSSpeed" in gps_info or not "GPSSpeedRef" in gps_info:
return None
speed_frac = gps_info["GPSSpeed"]
speed_ref = gps_info["GPSSpeedRef"]
speed = self.calc_tuple(speed_frac)
if speed is None or speed_ref is None:
return None
speed_ref = speed_ref.lower()
if speed_ref == "k":
pass # km/h - we are happy.
elif speed_ref == "m":
#Miles pr. hour => km/h
speed *= 1.609344
elif speed_ref == "n":
# Knots => km/h
speed *= 1.852
else:
print "Warning: Unknown format for GPS speed '%s' in '%s'." % (
speed_ref, self._filepath)
print "Please file a bug and attache the image."
return None
return speed
def is_ok_num(self, val, minVal, maxVal):
try:
num = int(val)
except ValueError:
return False
if num < minVal or num > maxVal:
return False
return True
def get_time(self):
# Example data
# GPSTimeStamp': ((9, 1), (14, 1), (9000, 1000))
# 'GPSDateStamp': u'2015:05:17'
gps_info = self.get_gps_info()
if gps_info is None:
return None
if not 'GPSTimeStamp' in gps_info or not 'GPSDateStamp' in gps_info:
return None
timestamp = gps_info['GPSTimeStamp']
datestamp = gps_info['GPSDateStamp']
if len(timestamp) != 3:
raise ExifException("Timestamp does not have length 3: %s" %
len(timestamp))
(timeH, timeM, timeS) = timestamp
h = self.calc_tuple(timeH)
m = self.calc_tuple(timeM)
s = self.calc_tuple(timeS)
if None in (h, m, s):
raise ExifException(
"Hour, minute or second is not valid: '%s':'%s':'%s'." %
(timeH, timeM, timeS))
if datestamp.count(':') != 2:
raise ExifException("Datestamp does not contain 2 colons: '%s'" %
datestamp)
(y, mon, d) = [int(str) for str in datestamp.split(':')]
if not self.is_ok_num(y, 1970, 2100) or not self.is_ok_num(
mon, 1, 12) or not self.is_ok_num(d, 1, 31):
raise ExifException(
"Date parsed from the following is not OK: '%s'" % datestamp)
return datetime.datetime(y, mon, d, h, m, s)

370 lib/exif_read.py Normal file

@@ -0,0 +1,370 @@
#!/usr/bin/env python
# coding: utf8
#source is exif_read.py from mapillary_tools :
#https://github.com/mapillary/mapillary_tools/blob/master/mapillary_tools/exif_read.py
import os
import sys
import json
import exifread
import datetime
from .geo import normalize_bearing
import uuid
sys.path.insert(0, os.path.abspath(
os.path.join(os.path.dirname(__file__), "..")))
#import jsonfrom
def eval_frac(value):
if value.den == 0:
return -1.0
return float(value.num) / float(value.den)
def format_time(time_string):
'''
Format time string with invalid time elements in hours/minutes/seconds
Format for the timestring needs to be "%Y_%m_%d_%H_%M_%S"
e.g. 2014_03_31_24_10_11 => 2014_04_01_00_10_11
'''
subseconds = False
data = time_string.split("_")
hours, minutes, seconds = int(data[3]), int(data[4]), int(data[5])
date = datetime.datetime.strptime("_".join(data[:3]), "%Y_%m_%d")
subsec = 0.0
if len(data) == 7:
if float(data[6]) != 0:
subsec = float(data[6]) / 10**len(data[6])
subseconds = True
date_time = date + \
datetime.timedelta(hours=hours, minutes=minutes,
seconds=seconds + subsec)
return date_time, subseconds
def gps_to_decimal(values, reference):
sign = 1 if reference in 'NE' else -1
degrees = eval_frac(values[0])
minutes = eval_frac(values[1])
seconds = eval_frac(values[2])
return sign * (degrees + minutes / 60 + seconds / 3600)
def exif_datetime_fields():
'''
Date time fields in EXIF
'''
return [["EXIF DateTimeOriginal",
"Image DateTimeOriginal",
"EXIF DateTimeDigitized",
"Image DateTimeDigitized",
"EXIF DateTime",
"Image DateTime",
"GPS GPSDate",
"EXIF GPS GPSDate",
"EXIF DateTimeModified"]]
def exif_gps_date_fields():
'''
Date fields in EXIF GPS
'''
return [["GPS GPSDate",
"EXIF GPS GPSDate"]]
class ExifRead:
'''
EXIF class for reading exif from an image
'''
def __init__(self, filename, details=False):
'''
Initialize EXIF object with FILE as filename or fileobj
'''
self.filename = filename
if type(filename) == str:
with open(filename, 'rb') as fileobj:
self.tags = exifread.process_file(fileobj, details=details)
else:
self.tags = exifread.process_file(filename, details=details)
def _extract_alternative_fields(self, fields, default=None, field_type=float):
'''
Extract a value for a list of ordered fields.
Return the value of the first existed field in the list
'''
for field in fields:
if field in self.tags:
if field_type is float:
value = eval_frac(self.tags[field].values[0])
if field_type is str:
value = str(self.tags[field].values)
if field_type is int:
value = int(self.tags[field].values[0])
return value, field
return default, None
def exif_name(self):
'''
Name of file in the form {lat}_{lon}_{ca}_{datetime}_{filename}_{hash}
'''
mapillary_description = json.loads(self.extract_image_description())
lat = None
lon = None
ca = None
date_time = None
if "MAPLatitude" in mapillary_description:
lat = mapillary_description["MAPLatitude"]
if "MAPLongitude" in mapillary_description:
lon = mapillary_description["MAPLongitude"]
if "MAPCompassHeading" in mapillary_description:
if 'TrueHeading' in mapillary_description["MAPCompassHeading"]:
ca = mapillary_description["MAPCompassHeading"]['TrueHeading']
if "MAPCaptureTime" in mapillary_description:
date_time = datetime.datetime.strptime(
mapillary_description["MAPCaptureTime"], "%Y_%m_%d_%H_%M_%S_%f").strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]
filename = '{}_{}_{}_{}_{}'.format(
lat, lon, ca, date_time, uuid.uuid4())
return filename
def extract_image_history(self):
field = ['Image Tag 0x9213']
user_comment, _ = self._extract_alternative_fields(field, '{}', str)
return user_comment
def extract_altitude(self):
'''
Extract altitude
'''
fields = ['GPS GPSAltitude', 'EXIF GPS GPSAltitude']
altitude, _ = self._extract_alternative_fields(fields, 0, float)
return altitude
def extract_capture_time(self):
'''
Extract capture time from EXIF
return a datetime object
TODO: handle GPS DateTime
'''
time_string = exif_datetime_fields()[0]
capture_time, time_field = self._extract_alternative_fields(
time_string, 0, str)
if time_field in exif_gps_date_fields()[0]:
capture_time = self.extract_gps_time()
return capture_time
if capture_time == 0:
# try interpret the filename
try:
capture_time = datetime.datetime.strptime(os.path.basename(
self.filename)[:-4] + '000', '%Y_%m_%d_%H_%M_%S_%f')
except:
return None
else:
capture_time = capture_time.replace(" ", "_")
capture_time = capture_time.replace(":", "_")
capture_time = capture_time.replace(".", "_")
capture_time = capture_time.replace("-", "_")
capture_time = "_".join(
[ts for ts in capture_time.split("_") if ts.isdigit()])
capture_time, subseconds = format_time(capture_time)
sub_sec = "0"
if not subseconds:
sub_sec = self.extract_subsec()
capture_time = capture_time + \
datetime.timedelta(seconds=float("0." + sub_sec))
return capture_time
def extract_direction(self):
'''
Extract image direction (i.e. compass, heading, bearing)
'''
fields = ['GPS GPSImgDirection',
'EXIF GPS GPSImgDirection',
'GPS GPSTrack',
'EXIF GPS GPSTrack']
direction, _ = self._extract_alternative_fields(fields)
if direction is not None:
direction = normalize_bearing(direction, check_hex=True)
return direction
def extract_dop(self):
'''
Extract dilution of precision
'''
fields = ['GPS GPSDOP', 'EXIF GPS GPSDOP']
dop, _ = self._extract_alternative_fields(fields)
return dop
def extract_geo(self):
'''
Extract geo-related information from exif
'''
altitude = self.extract_altitude()
dop = self.extract_dop()
lon, lat = self.extract_lon_lat()
d = {}
if lon is not None and lat is not None:
d['latitude'] = lat
d['longitude'] = lon
if altitude is not None:
d['altitude'] = altitude
if dop is not None:
d['dop'] = dop
return d
def extract_gps_time(self):
'''
Extract timestamp from GPS field.
'''
gps_date_field = "GPS GPSDate"
gps_time_field = "GPS GPSTimeStamp"
gps_time = 0
if gps_date_field in self.tags and gps_time_field in self.tags:
date = str(self.tags[gps_date_field].values).split(":")
t = self.tags[gps_time_field]
gps_time = datetime.datetime(
year=int(date[0]),
month=int(date[1]),
day=int(date[2]),
hour=int(eval_frac(t.values[0])),
minute=int(eval_frac(t.values[1])),
second=int(eval_frac(t.values[2])),
)
microseconds = datetime.timedelta(
microseconds=int((eval_frac(t.values[2]) % 1) * 1e6))
gps_time += microseconds
return gps_time
def extract_exif(self):
'''
Extract a list of exif infos
'''
width, height = self.extract_image_size()
make, model = self.extract_make(), self.extract_model()
orientation = self.extract_orientation()
geo = self.extract_geo()
capture = self.extract_capture_time()
direction = self.extract_direction()
d = {
'width': width,
'height': height,
'orientation': orientation,
'direction': direction,
'make': make,
'model': model,
'capture_time': capture
}
d['gps'] = geo
return d
def extract_image_size(self):
'''
Extract image height and width
'''
width, _ = self._extract_alternative_fields(
['Image ImageWidth', 'EXIF ExifImageWidth'], -1, int)
height, _ = self._extract_alternative_fields(
['Image ImageLength', 'EXIF ExifImageLength'], -1, int)
return width, height
def extract_image_description(self):
'''
Extract image description
'''
description, _ = self._extract_alternative_fields(
['Image ImageDescription'], "{}", str)
return description
def extract_lon_lat(self):
if 'GPS GPSLatitude' in self.tags and 'GPS GPSLongitude' in self.tags:
lat = gps_to_decimal(self.tags['GPS GPSLatitude'].values,
self.tags['GPS GPSLatitudeRef'].values)
lon = gps_to_decimal(self.tags['GPS GPSLongitude'].values,
self.tags['GPS GPSLongitudeRef'].values)
elif 'EXIF GPS GPSLatitude' in self.tags and 'EXIF GPS GPSLongitude' in self.tags:
lat = gps_to_decimal(self.tags['EXIF GPS GPSLatitude'].values,
self.tags['EXIF GPS GPSLatitudeRef'].values)
lon = gps_to_decimal(self.tags['EXIF GPS GPSLongitude'].values,
self.tags['EXIF GPS GPSLongitudeRef'].values)
else:
lon, lat = None, None
return lon, lat
def extract_make(self):
'''
Extract camera make
'''
fields = ['EXIF LensMake', 'Image Make']
make, _ = self._extract_alternative_fields(
fields, default='none', field_type=str)
return make
def extract_model(self):
'''
Extract camera model
'''
fields = ['EXIF LensModel', 'Image Model']
model, _ = self._extract_alternative_fields(
fields, default='none', field_type=str)
return model
def extract_orientation(self):
'''
Extract image orientation
'''
fields = ['Image Orientation']
orientation, _ = self._extract_alternative_fields(
fields, default=1, field_type=int)
if orientation not in range(1, 9):
return 1
return orientation
def extract_subsec(self):
'''
Extract microseconds
'''
fields = [
'Image SubSecTimeOriginal',
'EXIF SubSecTimeOriginal',
'Image SubSecTimeDigitized',
'EXIF SubSecTimeDigitized',
'Image SubSecTime',
'EXIF SubSecTime'
]
sub_sec, _ = self._extract_alternative_fields(
fields, default='', field_type=str)
return sub_sec.strip()
def fields_exist(self, fields):
'''
Check existence of a list fields in exif
'''
for rexif in fields:
vflag = False
for subrexif in rexif:
if subrexif in self.tags:
vflag = True
if not vflag:
print("Missing required EXIF tag: {0} for image {1}".format(
rexif[0], self.filename))
return False
return True
def mapillary_tag_exists(self):
'''
Check existence of required Mapillary tags
'''
description_tag = "Image ImageDescription"
if description_tag not in self.tags:
return False
for requirement in ["MAPSequenceUUID", "MAPSettingsUserKey", "MAPCaptureTime", "MAPLongitude", "MAPLatitude"]:
if requirement not in self.tags[description_tag].values or json.loads(self.tags[description_tag].values)[requirement] in ["", None, " "]:
return False
return True

122 lib/exif_write.py Normal file

@@ -0,0 +1,122 @@
import sys
import json
import piexif
from . geo import decimal_to_dms
#from .error import print_error
class ExifEdit(object):
def __init__(self, filename):
"""Initialize the object"""
self._filename = filename
self._ef = None
try:
self._ef = piexif.load(filename)
except IOError:
etype, value, traceback = sys.exc_info()
print("Error opening file:", value, file=sys.stderr)
except ValueError:
etype, value, traceback = sys.exc_info()
print("Error opening file:", value, file=sys.stderr)
def add_image_description(self, dict):
"""Add a dict to image description."""
if self._ef is not None:
self._ef['0th'][piexif.ImageIFD.ImageDescription] = json.dumps(
dict)
def add_orientation(self, orientation):
"""Add image orientation to image."""
if not orientation in range(1, 9):
print(
"Error value for orientation, value must be in range(1,9), setting to default 1")
self._ef['0th'][piexif.ImageIFD.Orientation] = 1
else:
self._ef['0th'][piexif.ImageIFD.Orientation] = orientation
def add_date_time_original(self, date_time):
"""Add date time original."""
try:
DateTimeOriginal = date_time.strftime('%Y:%m:%d %H:%M:%S')
self._ef['Exif'][piexif.ExifIFD.DateTimeOriginal] = DateTimeOriginal
except Exception as e:
print("Error writing DateTimeOriginal, due to " + str(e))
if date_time.microsecond != 0:
self.add_subsectimeoriginal(date_time.microsecond)
def add_subsectimeoriginal(self, subsec_value):
"""Add subsecond value in the subsectimeoriginal exif tag"""
try:
subsec = str(subsec_value).zfill(6)
self._ef['Exif'][piexif.ExifIFD.SubSecTimeOriginal] = subsec
except Exception as e:
print("Error writing SubSecTimeOriginal, due to " + str(e))
def add_lat_lon(self, lat, lon, precision=1e7):
"""Add lat, lon to gps (lat, lon in float)."""
self._ef["GPS"][piexif.GPSIFD.GPSLatitudeRef] = "N" if lat > 0 else "S"
self._ef["GPS"][piexif.GPSIFD.GPSLongitudeRef] = "E" if lon > 0 else "W"
self._ef["GPS"][piexif.GPSIFD.GPSLongitude] = decimal_to_dms(
abs(lon), int(precision))
self._ef["GPS"][piexif.GPSIFD.GPSLatitude] = decimal_to_dms(
abs(lat), int(precision))
def add_image_history(self, data):
"""Add arbitrary string to ImageHistory tag."""
self._ef['0th'][piexif.ImageIFD.ImageHistory] = json.dumps(data)
def add_camera_make_model(self, make, model):
''' Add camera make and model.'''
self._ef['0th'][piexif.ImageIFD.Make] = make
self._ef['0th'][piexif.ImageIFD.Model] = model
def add_dop(self, dop, precision=100):
"""Add GPSDOP (float)."""
self._ef["GPS"][piexif.GPSIFD.GPSDOP] = (
int(abs(dop) * precision), precision)
def add_altitude(self, altitude, precision=100):
"""Add altitude (pre is the precision)."""
ref = 0 if altitude > 0 else 1
self._ef["GPS"][piexif.GPSIFD.GPSAltitude] = (
int(abs(altitude) * precision), precision)
self._ef["GPS"][piexif.GPSIFD.GPSAltitudeRef] = ref
def add_direction(self, direction, ref="T", precision=100):
"""Add image direction."""
# normalize direction
direction = direction % 360.0
self._ef["GPS"][piexif.GPSIFD.GPSImgDirection] = (
int(abs(direction) * precision), precision)
self._ef["GPS"][piexif.GPSIFD.GPSImgDirectionRef] = ref
def add_firmware(self,firmware_string):
"""Add firmware version of camera"""
self._ef['0th'][piexif.ImageIFD.Software] = firmware_string
def add_custom_tag(self, value, main_key, tag_key):
try:
self._ef[main_key][tag_key] = value
except:
print("could not set tag {} under {} with value {}".format(
tag_key, main_key, value))
def write(self, filename=None):
"""Save exif data to file."""
if filename is None:
filename = self._filename
exif_bytes = piexif.dump(self._ef)
with open(self._filename, "rb") as fin:
img = fin.read()
try:
piexif.insert(exif_bytes, img, filename)
except IOError:
type, value, traceback = sys.exc_info()
print("Error saving file:", value, file=sys.stderr)
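
A minimal usage sketch of this piexif-based writer, mirroring what write_exif() in the download script does (the path and values are illustrative):

from datetime import datetime
from lib.exif_write import ExifEdit

edit = ExifEdit('data/sample.jpg')                     # hypothetical JPEG
edit.add_lat_lon(48.777089857534, 2.5174596904057)     # decimal degrees
edit.add_altitude(35.2)                                # altitude in metres
edit.add_direction(147.0)                              # bearing, ref "T" = true north
edit.add_date_time_original(datetime(2020, 10, 23, 13, 28, 56, 644000))
edit.write()                                           # rewrites the JPEG in place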

245 lib/exifedit.py Normal file

@@ -0,0 +1,245 @@
import sys
import json
import datetime
import hashlib
import base64
import uuid
from lib.geo import normalize_bearing
from lib.exif import EXIF, verify_exif
from lib.pexif import JpegFile, Rational
import shutil
def create_mapillary_description(filename, username, email, userkey,
upload_hash, sequence_uuid,
interpolated_heading=None,
offset_angle=0.0,
timestamp=None,
orientation=None,
project="",
secret_hash=None,
external_properties=None,
verbose=False):
'''
Check that image file has the required EXIF fields.
Incompatible files will be ignored server side.
'''
# read exif
exif = EXIF(filename)
if not verify_exif(filename):
return False
if orientation is None:
orientation = exif.extract_orientation()
# write the mapillary tag
mapillary_description = {}
# lat, lon of the image, takes precedence over EXIF GPS values
mapillary_description["MAPLongitude"], mapillary_description["MAPLatitude"] = exif.extract_lon_lat()
# altitude of the image, takes precedence over EXIF GPS values, assumed 0 if missing
mapillary_description["MAPAltitude"] = exif.extract_altitude()
# capture time: required date format: 2015_01_14_09_37_01_000, TZ MUST be UTC
if timestamp is None:
timestamp = exif.extract_capture_time()
# The capture time of the image in UTC. Will take precedence over any other time tags in the EXIF
mapillary_description["MAPCaptureTime"] = datetime.datetime.strftime(timestamp, "%Y_%m_%d_%H_%M_%S_%f")[:-3]
# EXIF orientation of the image
mapillary_description["MAPOrientation"] = orientation
heading = exif.extract_direction()
if heading is None:
heading = 0.0
heading = normalize_bearing(interpolated_heading + offset_angle) if interpolated_heading is not None else normalize_bearing(heading + offset_angle)
# bearing of the image
mapillary_description["MAPCompassHeading"] = {"TrueHeading": heading, "MagneticHeading": heading}
# authentication
assert(email is not None or userkey is not None)
if email is not None:
mapillary_description["MAPSettingsEmail"] = email
if username is not None:
mapillary_description["MAPSettingsUsername"] = username
# use this if available, and omit MAPSettingsUsername and MAPSettingsEmail for privacy reasons
if userkey is not None:
mapillary_description["MAPSettingsUserKey"] = userkey
if upload_hash is not None:
settings_upload_hash = hashlib.sha256("%s%s%s" % (upload_hash, email, base64.b64encode(filename))).hexdigest()
# this is not checked in the backend right now, will likely be changed to have user_key instead of email as part
# of the hash
mapillary_description['MAPSettingsUploadHash'] = settings_upload_hash
# a unique photo ID to check for duplicates in the backend in case the image gets uploaded more than once
mapillary_description['MAPPhotoUUID'] = str(uuid.uuid4())
# a sequence ID to make the images go together (order by MAPCaptureTime)
mapillary_description['MAPSequenceUUID'] = str(sequence_uuid)
# The device model
mapillary_description['MAPDeviceModel'] = exif.extract_model()
# The device manufacturer
mapillary_description['MAPDeviceMake'] = exif.extract_make()
if upload_hash is None and secret_hash is not None:
mapillary_description['MAPVideoSecure'] = secret_hash
mapillary_description["MAPSettingsProject"] = project
# external properties (optional)
if external_properties is not None:
# external properties can be saved and searched in Mapillary later on
mapillary_description['MAPExternalProperties'] = external_properties
# write to file
if verbose:
print("tag: {0}".format(mapillary_description))
metadata = ExifEdit(filename)
metadata.add_image_description(mapillary_description)
metadata.add_orientation(orientation)
metadata.add_direction(heading)
metadata.write()
def add_mapillary_description(filename, username, email,
project, upload_hash, image_description,
output_file=None):
"""Add Mapillary description tags directly with user info."""
if username is not None:
# write the mapillary tag
image_description["MAPSettingsUploadHash"] = upload_hash
image_description["MAPSettingsEmail"] = email
image_description["MAPSettingsUsername"] = username
settings_upload_hash = hashlib.sha256("%s%s%s" % (upload_hash, email, base64.b64encode(filename))).hexdigest()
image_description['MAPSettingsUploadHash'] = settings_upload_hash
# if this image is part of a project, the project UUID
image_description["MAPSettingsProject"] = project
assert("MAPSequenceUUID" in image_description)
if output_file is not None:
shutil.copy(filename, output_file)
filename = output_file
# write to file
json_desc = json.dumps(image_description)
metadata = ExifEdit(filename)
metadata.add_image_description(json_desc)
metadata.add_orientation(image_description.get("MAPOrientation", 1))
metadata.add_direction(image_description["MAPCompassHeading"]["TrueHeading"])
metadata.add_lat_lon(image_description["MAPLatitude"], image_description["MAPLongitude"])
date_time = datetime.datetime.strptime(image_description["MAPCaptureTime"]+"000", "%Y_%m_%d_%H_%M_%S_%f")
metadata.add_date_time_original(date_time)
metadata.write()
def add_exif_data(filename, data, output_file=None):
"""Add minimal exif data to an image"""
if output_file is not None:
shutil.copy(filename, output_file)
filename = output_file
metadata = ExifEdit(filename)
metadata.add_orientation(data.get("orientation", 1))
metadata.add_direction(data.get("bearing", 0))
metadata.add_lat_lon(data["lat"], data["lon"])
metadata.add_date_time_original(data["capture_time"])
metadata.add_camera_make_model(data["make"], data["model"])
metadata.write()
class ExifEdit(object):
def __init__(self, filename):
"""Initialize the object"""
self.filename = filename
self.ef = None
try:
if (type(filename) is str) or (type(filename) is unicode):
self.ef = JpegFile.fromFile(filename)
else:
filename.seek(0)
self.ef = JpegFile.fromString(filename.getvalue())
except IOError:
etype, value, traceback = sys.exc_info()
print >> sys.stderr, "Error opening file:", value
except JpegFile.InvalidFile:
etype, value, traceback = sys.exc_info()
print >> sys.stderr, "Error opening file:", value
def add_image_description(self, dict):
"""Add a dict to image description."""
if self.ef is not None:
self.ef.exif.primary.ImageDescription = json.dumps(dict)
def add_orientation(self, orientation):
"""Add image orientation to image."""
self.ef.exif.primary.Orientation = [orientation]
def add_date_time_original(self, date_time):
"""Add date time original."""
self.ef.exif.primary.ExtendedEXIF.DateTimeOriginal = date_time.strftime('%Y:%m:%d %H:%M:%S')
"""Add subsecond if the value exists"""
if date_time.microsecond:
subsec = str(date_time.microsecond).zfill(6)
self.add_subsec_time_original(subsec)
#if date_time.microsecond:
# self.ef.exif.primary.ExtendedEXIF.SubSecTimeOriginal = str(date_time.microsecond).zfill(6)
def add_subsec_time_original(self, subsec):
"""Add subsecond."""
self.ef.exif.primary.ExtendedEXIF.SubSecTimeOriginal = subsec
def add_lat_lon(self, lat, lon):
"""Add lat, lon to gps (lat, lon in float)."""
self.ef.set_geo(float(lat), float(lon))
def add_camera_make_model(self, make, model):
''' Add camera make and model.'''
self.ef.exif.primary.Make = make
self.ef.exif.primary.Model = model
def add_dop(self, dop, perc=100):
"""Add GPSDOP (float)."""
self.ef.exif.primary.GPS.GPSDOP = [Rational(abs(dop * perc), perc)]
def add_altitude(self, altitude, precision=100):
"""Add altitude (pre is the precision)."""
ref = '\x00' if altitude > 0 else '\x01'
self.ef.exif.primary.GPS.GPSAltitude = [Rational(abs(altitude * precision), precision)]
self.ef.exif.primary.GPS.GPSAltitudeRef = [ref]
def add_direction(self, direction, ref="T", precision=100):
"""Add image direction."""
self.ef.exif.primary.GPS.GPSImgDirection = [Rational(abs(direction * precision), precision)]
self.ef.exif.primary.GPS.GPSImgDirectionRef = ref
def write(self, filename=None):
"""Save exif data to file."""
try:
if filename is None:
filename = self.filename
self.ef.writeFile(filename)
except IOError:
type, value, traceback = sys.exc_info()
print >> sys.stderr, "Error saving file:", value
def write_to_string(self):
"""Save exif data to StringIO object."""
return self.ef.writeString()
def write_to_file_object(self):
"""Save exif data to file object."""
return self.ef.writeFd()

222 lib/ffprobe.py Normal file

@@ -0,0 +1,222 @@
#!/usr/bin/python
# Filename: ffprobe.py
"""
Based on Python wrapper for ffprobe command line tool. ffprobe must exist in the path.
Author: Simon Hargreaves
"""
version='0.5'
import subprocess
import re
import sys
import os
import platform
class FFProbe:
"""
FFProbe wraps the ffprobe command and pulls the data into an object form::
metadata=FFProbe('multimedia-file.mov')
"""
def __init__(self,video_file):
self.video_file=video_file
try:
with open(os.devnull, 'w') as tempf:
subprocess.check_call(["ffprobe","-h"],stdout=tempf,stderr=tempf)
except:
raise IOError('ffprobe not found.')
if os.path.isfile(video_file):
video_file = self.video_file.replace(" ", "\ ")
if str(platform.system())=='Windows':
cmd=["ffprobe", "-show_streams", video_file]
else:
cmd=["ffprobe -show_streams " + video_file]
p = subprocess.Popen(cmd,stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)
self.format=None
self.created=None
self.duration=None
self.start=None
self.bitrate=None
self.creation_time=None
self.streams=[]
self.video=[]
self.audio=[]
datalines=[]
for a in iter(p.stdout.readline, b''):
if re.match('\[STREAM\]',a):
datalines=[]
elif re.match('\[\/STREAM\]',a):
self.streams.append(FFStream(datalines))
datalines=[]
else:
datalines.append(a)
for a in iter(p.stderr.readline, b''):
if re.match('\[STREAM\]',a):
datalines=[]
elif re.match('\[\/STREAM\]',a):
self.streams.append(FFStream(datalines))
datalines=[]
else:
datalines.append(a)
p.stdout.close()
p.stderr.close()
for a in self.streams:
if a.isAudio():
self.audio.append(a)
if a.isVideo():
self.video.append(a)
else:
raise IOError('No such media file ' + video_file)
class FFStream:
"""
An object representation of an individual stream in a multimedia file.
"""
def __init__(self,datalines):
for a in datalines:
if re.match(r'^.+=.+$', a) is None:
print "Warning: detected incorrect stream metadata line format: %s" % a
else:
(key,val)=a.strip().split('=')
key = key.lstrip("TAG:")
self.__dict__[key]=val
def isAudio(self):
"""
Is this stream labelled as an audio stream?
"""
val=False
if self.__dict__['codec_type']:
if str(self.__dict__['codec_type']) == 'audio':
val=True
return val
def isVideo(self):
"""
Is the stream labelled as a video stream.
"""
val=False
if self.__dict__['codec_type']:
if self.codec_type == 'video':
val=True
return val
def isSubtitle(self):
"""
Is the stream labelled as a subtitle stream.
"""
val=False
if self.__dict__['codec_type']:
if str(self.codec_type)=='subtitle':
val=True
return val
def frameSize(self):
"""
Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream.
Returns None if it is not a video stream.
"""
size=None
if self.isVideo():
if self.__dict__['width'] and self.__dict__['height']:
try:
size=(int(self.__dict__['width']),int(self.__dict__['height']))
except Exception as e:
print "None integer size %s:%s" %(str(self.__dict__['width']),str(+self.__dict__['height']))
size=(0,0)
return size
def pixelFormat(self):
"""
Returns a string representing the pixel format of the video stream. e.g. yuv420p.
Returns none is it is not a video stream.
"""
f=None
if self.isVideo():
if self.__dict__['pix_fmt']:
f=self.__dict__['pix_fmt']
return f
def frames(self):
"""
Returns the length of a video stream in frames. Returns 0 if not a video stream.
"""
f=0
if self.isVideo() or self.isAudio():
if self.__dict__['nb_frames']:
try:
f=int(self.__dict__['nb_frames'])
except Exception as e:
print "None integer frame count"
return f
def durationSeconds(self):
"""
Returns the runtime duration of the video stream as a floating point number of seconds.
Returns 0.0 if not a video stream.
"""
f=0.0
if self.isVideo() or self.isAudio():
if self.__dict__['duration']:
try:
f=float(self.__dict__['duration'])
except Exception as e:
print "None numeric duration"
return f
def language(self):
"""
Returns language tag of stream. e.g. eng
"""
lang=None
if self.__dict__['TAG:language']:
lang=self.__dict__['TAG:language']
return lang
def codec(self):
"""
Returns a string representation of the stream codec.
"""
codec_name=None
if self.__dict__['codec_name']:
codec_name=self.__dict__['codec_name']
return codec_name
def codecDescription(self):
"""
Returns a long representation of the stream codec.
"""
codec_d=None
if self.__dict__['codec_long_name']:
codec_d=self.__dict__['codec_long_name']
return codec_d
def codecTag(self):
"""
Returns a short representative tag of the stream codec.
"""
codec_t=None
if self.__dict__['codec_tag_string']:
codec_t=self.__dict__['codec_tag_string']
return codec_t
def bitrate(self):
"""
Returns bitrate as an integer in bps
"""
b=0
if self.__dict__['bit_rate']:
try:
b=int(self.__dict__['bit_rate'])
except Exception as e:
print "None integer bitrate"
return b
if __name__ == '__main__':
print "Module ffprobe"

198 lib/geo.py Normal file

@@ -0,0 +1,198 @@
# -*- coding: utf-8 -*-
import datetime
import math
WGS84_a = 6378137.0
WGS84_b = 6356752.314245
def ecef_from_lla(lat, lon, alt):
'''
Compute ECEF XYZ from latitude, longitude and altitude.
All using the WGS84 model.
Altitude is the distance to the WGS84 ellipsoid.
Check results here http://www.oc.nps.edu/oc2902w/coord/llhxyz.htm
'''
a2 = WGS84_a**2
b2 = WGS84_b**2
lat = math.radians(lat)
lon = math.radians(lon)
L = 1.0 / math.sqrt(a2 * math.cos(lat)**2 + b2 * math.sin(lat)**2)
x = (a2 * L + alt) * math.cos(lat) * math.cos(lon)
y = (a2 * L + alt) * math.cos(lat) * math.sin(lon)
z = (b2 * L + alt) * math.sin(lat)
return x, y, z
def gps_distance(latlon_1, latlon_2):
'''
Distance between two (lat,lon) pairs.
>>> p1 = (42.1, -11.1)
>>> p2 = (42.2, -11.3)
>>> 19000 < gps_distance(p1, p2) < 20000
True
'''
x1, y1, z1 = ecef_from_lla(latlon_1[0], latlon_1[1], 0.)
x2, y2, z2 = ecef_from_lla(latlon_2[0], latlon_2[1], 0.)
dis = math.sqrt((x1-x2)**2 + (y1-y2)**2 + (z1-z2)**2)
return dis
def dms_to_decimal(degrees, minutes, seconds, hemisphere):
'''
Convert from degrees, minutes, seconds to decimal degrees.
@author: mprins
'''
dms = float(degrees) + float(minutes) / 60 + float(seconds) / 3600
if hemisphere in "WwSs":
dms = -1 * dms
return dms
def decimal_to_dms(value, precision):
'''
Convert decimal position to degrees, minutes, seconds
'''
deg = math.floor(value)
min = math.floor((value - deg) * 60)
sec = math.floor((value - deg - min / 60) * 3600 * precision)
return (deg, 1), (min, 1), (sec, precision)
def gpgga_to_dms(gpgga):
'''
Convert GPS coordinate in GPGGA format to degree/minute/second
Reference: http://us.cactii.net/~bb/gps.py
'''
deg_min, dmin = gpgga.split('.')
degrees = int(deg_min[:-2])
minutes = float('%s.%s' % (deg_min[-2:], dmin))
decimal = degrees + (minutes/60)
return decimal
def utc_to_localtime(utc_time):
utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now()
return utc_time - utc_offset_timedelta
def compute_bearing(start_lat, start_lon, end_lat, end_lon):
'''
Get the compass bearing from start to end.
Formula from
http://www.movable-type.co.uk/scripts/latlong.html
'''
# make sure everything is in radians
start_lat = math.radians(start_lat)
start_lon = math.radians(start_lon)
end_lat = math.radians(end_lat)
end_lon = math.radians(end_lon)
dLong = end_lon - start_lon
dPhi = math.log(math.tan(end_lat/2.0+math.pi/4.0)/math.tan(start_lat/2.0+math.pi/4.0))
if abs(dLong) > math.pi:
if dLong > 0.0:
dLong = -(2.0 * math.pi - dLong)
else:
dLong = (2.0 * math.pi + dLong)
y = math.sin(dLong)*math.cos(end_lat)
x = math.cos(start_lat)*math.sin(end_lat) - math.sin(start_lat)*math.cos(end_lat)*math.cos(dLong)
bearing = (math.degrees(math.atan2(y, x)) + 360.0) % 360.0
return bearing
def diff_bearing(b1, b2):
'''
Compute difference between two bearings
'''
d = abs(b2-b1)
d = 360-d if d>180 else d
return d
def offset_bearing(bearing, offset):
'''
Add offset to bearing
'''
bearing = (bearing + offset) % 360
return bearing
def normalize_bearing(bearing, check_hex=False):
'''
Normalize bearing and convert from hex if necessary
'''
if bearing > 360 and check_hex:
# fix negative value wrongly parsed in exifread
# -360 degree -> 4294966935 when converting from hex
bearing = bin(int(bearing))[2:]
bearing = ''.join([str(int(int(a)==0)) for a in bearing])
bearing = -float(int(bearing, 2))
bearing %= 360
return bearing
def interpolate_lat_lon(points, t, max_dt=1):
'''
Return interpolated lat, lon and compass bearing for time t.
Points is a list of tuples (time, lat, lon, elevation), t a datetime object.
'''
# find the enclosing points in sorted list
if (t<=points[0][0]) or (t>=points[-1][0]):
if t<=points[0][0]:
dt = abs((points[0][0]-t).total_seconds())
else:
dt = (t-points[-1][0]).total_seconds()
if dt>max_dt:
raise ValueError("Time t not in scope of gpx file.")
else:
print ("Warning: Time t not in scope of gpx file by {} seconds, extrapolating...".format(dt))
if t < points[0][0]:
before = points[0]
after = points[1]
else:
before = points[-2]
after = points[-1]
bearing = compute_bearing(before[1], before[2], after[1], after[2])
if t==points[0][0]:
x = points[0]
return (x[1], x[2], bearing, x[3])
if t==points[-1][0]:
x = points[-1]
return (x[1], x[2], bearing, x[3])
else:
for i,point in enumerate(points):
if t<point[0]:
if i>0:
before = points[i-1]
else:
before = points[i]
after = points[i]
break
# time diff
dt_before = (t-before[0]).total_seconds()
dt_after = (after[0]-t).total_seconds()
# simple linear interpolation
lat = (before[1]*dt_after + after[1]*dt_before) / (dt_before + dt_after)
lon = (before[2]*dt_after + after[2]*dt_before) / (dt_before + dt_after)
bearing = compute_bearing(before[1], before[2], after[1], after[2])
if before[3] is not None:
ele = (before[3]*dt_after + after[3]*dt_before) / (dt_before + dt_after)
else:
ele = None
return lat, lon, bearing, ele
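
A couple of quick illustrative calls into these helpers (coordinates chosen arbitrarily):

from lib.geo import decimal_to_dms, gps_distance, compute_bearing

# degree/minute/second rationals ready for the EXIF GPS IFD, e.g. ((48, 1), (46, 1), (sec, 10000000))
print(decimal_to_dms(48.777089857534, int(1e7)))
# straight-line distance in metres, and initial bearing in degrees, between two nearby points
print(gps_distance((48.777, 2.517), (48.778, 2.520)))
print(compute_bearing(48.777, 2.517, 48.778, 2.520))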

89 lib/gps_parser.py Normal file

@@ -0,0 +1,89 @@
#!/usr/bin/python
import sys
import os
import datetime
import time
from .geo import gpgga_to_dms, utc_to_localtime
import gpxpy
import pynmea2
'''
Methods for parsing gps data from various file format e.g. GPX, NMEA, SRT.
'''
def get_lat_lon_time_from_gpx(gpx_file, local_time=True):
'''
Read location and time stamps from a track in a GPX file.
Returns a list of tuples (time, lat, lon, elevation).
GPX stores time in UTC; by default we assume your camera used local time
and convert accordingly.
'''
with open(gpx_file, 'r') as f:
gpx = gpxpy.parse(f)
points = []
if len(gpx.tracks)>0:
for track in gpx.tracks:
for segment in track.segments:
for point in segment.points:
t = utc_to_localtime(point.time) if local_time else point.time
points.append( (t, point.latitude, point.longitude, point.elevation) )
'''if len(gpx.waypoints) > 0:
for point in gpx.waypoints:
t = utc_to_localtime(point.time) if local_time else point.time
points.append( (t, point.latitude, point.longitude, point.elevation) )'''
# sort by time just in case
points.sort()
return points
def get_lat_lon_time_from_nmea(nmea_file, local_time=True):
'''
Read location and time stamps from a track in an NMEA file.
Returns a list of tuples (time, lat, lon, altitude).
NMEA stores time in UTC; by default we assume your camera used local time
and convert accordingly.
'''
gga_Talker_id = ("$GNGGA", "$GPGGA", "$GLGGA", "$GBGGA", "$GAGGA")
rmc_Talker_id = ("$GNRMC", "$GPRMC", "$GLRMC", "$GBRMC", "$GARMC")
with open(nmea_file, "r") as f:
lines = f.readlines()
lines = [l.rstrip("\n\r") for l in lines]
# Get initial date
for l in lines:
if any(rmc in l for rmc in rmc_Talker_id):
data = pynmea2.parse(l, check=False)
date = data.datetime.date()
break
# Parse GPS trace
points = []
for l in lines:
if any(rmc in l for rmc in rmc_Talker_id):
data = pynmea2.parse(l, check=False)
date = data.datetime.date()
if any(gga in l for gga in gga_Talker_id):
data = pynmea2.parse(l, check=False)
timestamp = datetime.datetime.combine(date, data.timestamp)
lat, lon, alt = data.latitude, data.longitude, data.altitude
points.append((timestamp, lat, lon, alt))
points.sort()
return points
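
Illustrative use of the GPX reader (the track path is hypothetical; gpxpy and pynmea2 must be installed for this module to import):

from lib.gps_parser import get_lat_lon_time_from_gpx

points = get_lat_lon_time_from_gpx('data/track.gpx')   # [(time, lat, lon, elevation), ...]
if points:
    print(len(points), points[0])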

27 lib/io.py Normal file

@@ -0,0 +1,27 @@
import os
import errno
import sys
def mkdir_p(path):
'''
Make a directory including parent directories.
'''
try:
os.makedirs(path)
except os.error as exc:
if exc.errno != errno.EEXIST or not os.path.isdir(path):
raise
def progress(count, total, suffix=''):
'''
Display progress bar
sources: https://gist.github.com/vladignatyev/06860ec2040cb497f0f3
'''
bar_len = 60
filled_len = int(round(bar_len * count / float(total)))
percents = round(100.0 * count / float(total), 1)
bar = '=' * filled_len + '-' * (bar_len - filled_len)
sys.stdout.write('[%s] %s%s %s\r' % (bar, percents, '%', suffix))
sys.stdout.flush()
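
For example, the progress helper can wrap a long loop such as the image download (a sketch, not part of the commit):

import time
from lib.io import progress

total = 25
for i in range(total):
    time.sleep(0.05)                    # stand-in for real work
    progress(i + 1, total, suffix='downloading')
print()                                 # move past the \r-based progress line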

1153 lib/pexif.py Normal file

File diff suppressed because it is too large

317 lib/sequence.py Normal file

@@ -0,0 +1,317 @@
import os
import sys
import lib.io
import lib.geo
from lib.exif import EXIF, verify_exif
from collections import OrderedDict
import datetime
'''
Sequence class for organizing/cleaning up photos in a folder
- split to sequences based on time intervals
- split to sequences based on gps distances
- remove duplicate images (e.g. waiting for red light, in traffic etc) @simonmikkelsen
'''
MAXIMUM_SEQUENCE_LENGTH = 1000
class Sequence(object):
def __init__(self, filepath, skip_folders=[], skip_subfolders=False, check_exif=True):
self.filepath = filepath
self._skip_folders = skip_folders
self._skip_subfolders = skip_subfolders
self.file_list = self.get_file_list(filepath, check_exif)
self.num_images = len(self.file_list)
def _is_skip(self, filepath):
'''
Skip photos in specified folders
- filepath/duplicates: it stores potential duplicate photos
detected by method 'remove_duplicates'
- filepath/success: it stores photos that have been successfully
'''
_is_skip = False
for folder in self._skip_folders:
if folder in filepath:
_is_skip = True
if self._skip_subfolders and filepath != self.filepath:
_is_skip = True
return _is_skip
def _read_capture_time(self, filename):
'''
Use EXIF class to parse capture time from EXIF.
'''
exif = EXIF(filename)
return exif.extract_capture_time()
def _read_lat_lon(self, filename):
'''
Use EXIF class to parse latitude and longitude from EXIF.
'''
exif = EXIF(filename)
lon, lat = exif.extract_lon_lat()
return lat, lon
def _read_direction(self, filename):
'''
Use EXIF class to parse compass direction from EXIF.
'''
exif = EXIF(filename)
direction = exif.extract_direction()
return direction
def get_file_list(self, filepath, check_exif=True):
'''
Get the list of JPEGs in the folder (nested folders)
'''
if filepath.lower().endswith(".jpg"):
# single file
file_list = [filepath]
else:
file_list = []
for root, sub_folders, files in os.walk(self.filepath):
if not self._is_skip(root):
image_files = [os.path.join(root, filename) for filename in files if (filename.lower().endswith(".jpg"))]
if check_exif:
image_files = [f for f in image_files if verify_exif(f)]
file_list += image_files
return file_list
def sort_file_list(self, file_list):
'''
Read capture times and sort files in time order.
'''
if len(file_list) == 0:
return [], []
capture_times = [self._read_capture_time(filepath) for filepath in file_list]
sorted_times_files = zip(capture_times, file_list)
sorted_times_files.sort()
return zip(*sorted_times_files)
def move_groups(self, groups, sub_path=''):
'''
Move the files in the groups to new folders.
'''
for i,group in enumerate(groups):
new_dir = os.path.join(self.filepath, sub_path, str(i))
lib.io.mkdir_p(new_dir)
for filepath in group:
os.rename(filepath, os.path.join(new_dir, os.path.basename(filepath)))
print("Moved {0} photos to {1}".format(len(group), new_dir))
def set_skip_folders(self, folders):
'''
Set folders to skip when iterating through the path
'''
self._skip_folders = folders
def set_file_list(self, file_list):
'''
Set file list for the sequence
'''
self.file_list = file_list
def split(self, cutoff_distance=500., cutoff_time=None, max_sequence_length=MAXIMUM_SEQUENCE_LENGTH, move_files=True, verbose=False, skip_cutoff=False):
'''
Split photos into sequences in case of large distance gap or large time interval
@params cutoff_distance: maximum distance gap in meters
@params cutoff_time: maximum time interval in seconds (if None, use 1.5 x median time interval in the sequence)
'''
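# Example call (sketch, hypothetical path):
#   Sequence('data/trip').split(cutoff_distance=100., cutoff_time=120)
# cuts whenever consecutive photos are more than 100 m or 120 s apart, or
# when a group grows beyond max_sequence_length photos.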
file_list = self.file_list
groups = []
if len(file_list) >= 1:
# sort based on EXIF capture time
capture_times, file_list = self.sort_file_list(file_list)
# diff in capture time
capture_deltas = [t2-t1 for t1,t2 in zip(capture_times, capture_times[1:])]
# read gps for ordered files
latlons = [self._read_lat_lon(filepath) for filepath in file_list]
# distance between consecutive images
distances = [lib.geo.gps_distance(ll1, ll2) for ll1, ll2 in zip(latlons, latlons[1:])]
# if cutoff time is given use that, else assume cutoff is 1.5x median time delta
if cutoff_time is None:
if verbose:
print "Cut-off time is None"
median = sorted(capture_deltas)[len(capture_deltas)//2]
if type(median) is not int:
median = median.total_seconds()
cutoff_time = 1.5*median
# extract groups by cutting using cutoff time
group = [file_list[0]]
cut = 0
for i,filepath in enumerate(file_list[1:]):
cut_time = capture_deltas[i].total_seconds() > cutoff_time
cut_distance = distances[i] > cutoff_distance
cut_sequence_length = len(group) > max_sequence_length
if cut_time or cut_distance or cut_sequence_length:
cut += 1
# delta too big, save current group, start new
groups.append(group)
group = [filepath]
if verbose:
if cut_distance:
print('Cut {}: Delta in distance {} meters is bigger than cutoff_distance {} meters at {}'.format(cut, distances[i], cutoff_distance, file_list[i+1]))
elif cut_time:
print('Cut {}: Delta in time {} seconds is bigger than cutoff_time {} seconds at {}'.format(cut, capture_deltas[i].total_seconds(), cutoff_time, file_list[i+1]))
elif cut_sequence_length:
print('Cut {}: Maximum sequence length {} reached at {}'.format(cut, max_sequence_length, file_list[i+1]))
else:
group.append(filepath)
groups.append(group)
# move groups to subfolders
if move_files:
self.move_groups(groups)
print("Done split photos in {} into {} sequences".format(self.filepath, len(groups)))
return groups
def interpolate_direction(self, offset=0):
'''
Interpolate bearing of photos in a sequence with an offset
@author: mprins
'''
bearings = {}
file_list = self.file_list
num_file = len(file_list)
if num_file > 1:
# sort based on EXIF capture time
capture_times, file_list = self.sort_file_list(file_list)
# read gps for ordered files
latlons = [self._read_lat_lon(filepath) for filepath in file_list]
if len(file_list) > 1:
# bearing between consecutive images
bearings = [lib.geo.compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
for ll1, ll2 in zip(latlons, latlons[1:])]
bearings.append(bearings[-1])
bearings = {file_list[i]: lib.geo.offset_bearing(b, offset) for i, b in enumerate(bearings)}
elif num_file == 1:
#if there is only one file in the list, just write the direction 0 and offset
bearings = {file_list[0]: lib.geo.offset_bearing(0.0, offset)}
return bearings
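# Example (sketch, hypothetical path):
#   bearings = Sequence('data/trip').interpolate_direction(offset=180)
# returns a {filename: bearing} dict where each bearing is computed from the
# GPS track between consecutive photos and then rotated by 180 degrees,
# e.g. for a rear-facing camera.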
def interpolate_timestamp(self):
'''
Interpolate time stamps in case of identical timestamps within a sequence
'''
timestamps = []
file_list = self.file_list
num_file = len(file_list)
time_dict = OrderedDict()
capture_times, file_list = self.sort_file_list(file_list)
if num_file < 2:
return capture_times, file_list
# trace identical timestamps (always assume capture_times is sorted)
time_dict = OrderedDict()
for i, t in enumerate(capture_times):
if t not in time_dict:
time_dict[t] = {
"count": 0,
"pointer": 0
}
interval = 0
if i != 0:
interval = (t - capture_times[i-1]).total_seconds()
time_dict[capture_times[i-1]]["interval"] = interval
time_dict[t]["count"] += 1
if len(time_dict) >= 2:
# set time interval as the last available time interval
time_dict[time_dict.keys()[-1]]["interval"] = time_dict[time_dict.keys()[-2]]["interval"]
else:
# set time interval assuming capture interval is 1 second
time_dict[time_dict.keys()[0]]["interval"] = time_dict[time_dict.keys()[0]]["count"] * 1.
# interpolate timestamps
for f, t in zip(file_list, capture_times):
d = time_dict[t]
s = datetime.timedelta(seconds=d["pointer"] * d["interval"] / float(d["count"]))
updated_time = t + s
time_dict[t]["pointer"] += 1
timestamps.append(updated_time)
return timestamps, file_list
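# Worked example (sketch): captures at 10:00:00, 10:00:00, 10:00:00, 10:00:03
# share the 3 s gap to the next distinct timestamp, so they are rewritten as
# 10:00:00, 10:00:01, 10:00:02 (t + pointer * interval / count); the last
# photo keeps 10:00:03.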
def remove_duplicates(self, min_distance=1e-5, min_angle=5):
'''
Detect duplicate photos in a folder
@source: a less general version of @simonmikkelsen's duplicate remover
'''
file_list = self.file_list
# ordered list by time
capture_times, file_list = self.sort_file_list(file_list)
# read gps for ordered files
latlons = [self._read_lat_lon(filepath) for filepath in file_list]
# read bearing for ordered files
bearings = [self._read_direction(filepath) for filepath in file_list]
# interpolated bearings
interpolated_bearings = [lib.geo.compute_bearing(ll1[0], ll1[1], ll2[0], ll2[1])
for ll1, ll2 in zip(latlons, latlons[1:])]
interpolated_bearings.append(bearings[-1])
# use interpolated bearings if bearing is not available in EXIF
for i, b in enumerate(bearings):
bearings[i] = b if b is not None else interpolated_bearings[i]
is_duplicate = False
prev_unique = file_list[0]
prev_latlon = latlons[0]
prev_bearing = bearings[0]
groups = []
group = []
for i, filename in enumerate(file_list[1:]):
k = i+1
distance = lib.geo.gps_distance(latlons[k], prev_latlon)
if bearings[k] is not None and prev_bearing is not None:
bearing_diff = lib.geo.diff_bearing(bearings[k], prev_bearing)
else:
# don't use bearing difference if bearings are not available
bearing_diff = 360
if distance < min_distance and bearing_diff < min_angle:
is_duplicate = True
else:
prev_latlon = latlons[k]
prev_bearing = bearings[k]
if is_duplicate:
group.append(filename)
else:
if group:
groups.append(group)
group = []
is_duplicate = False
groups.append(group)
# move to filepath/duplicates/group_id (TODO: uploader should skip the duplicate folder)
self.move_groups(groups, 'duplicates')
print("Done remove duplicate photos in {} into {} groups".format(self.filepath, len(groups)))
return groups

356
lib/uploader.py Normal file
View File

@ -0,0 +1,356 @@
from lib.exif import EXIF
import lib.io
import json
import os
import string
import threading
import sys
import urllib2, urllib, httplib
import socket
import mimetypes
import random
from Queue import Queue
import exifread
import time
MAPILLARY_UPLOAD_URL = "https://d22zcsn13kp53w.cloudfront.net/"
MAPILLARY_DIRECT_UPLOAD_URL = "https://s3-eu-west-1.amazonaws.com/mapillary.uploads.images"
PERMISSION_HASH = "eyJleHBpcmF0aW9uIjoiMjAyMC0wMS0wMVQwMDowMDowMFoiLCJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJtYXBpbGxhcnkudXBsb2Fkcy5pbWFnZXMifSxbInN0YXJ0cy13aXRoIiwiJGtleSIsIiJdLHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwiIl0sWyJjb250ZW50LWxlbmd0aC1yYW5nZSIsMCwyMDQ4NTc2MF1dfQ=="
SIGNATURE_HASH = "f6MHj3JdEq8xQ/CmxOOS7LvMxoI="
BOUNDARY_CHARS = string.digits + string.ascii_letters
NUMBER_THREADS = int(os.getenv('NUMBER_THREADS', '4'))
MAX_ATTEMPTS = int(os.getenv('MAX_ATTEMPTS', '10'))
UPLOAD_PARAMS = {"url": MAPILLARY_UPLOAD_URL, "permission": PERMISSION_HASH, "signature": SIGNATURE_HASH, "move_files":True, "keep_file_names": True}
CLIENT_ID = "MkJKbDA0bnZuZlcxeTJHTmFqN3g1dzo1YTM0NjRkM2EyZGU5MzBh"
LOGIN_URL = "https://a.mapillary.com/v2/ua/login?client_id={}".format(CLIENT_ID)
PROJECTS_URL = "https://a.mapillary.com/v3/users/{}/projects?client_id={}"
ME_URL = "https://a.mapillary.com/v3/me?client_id={}".format(CLIENT_ID)
class UploadThread(threading.Thread):
def __init__(self, queue, params=UPLOAD_PARAMS):
threading.Thread.__init__(self)
self.q = queue
self.params = params
self.total_task = self.q.qsize()
def run(self):
while True:
# fetch file from the queue and upload
filepath = self.q.get()
if filepath is None:
self.q.task_done()
break
else:
lib.io.progress(self.total_task-self.q.qsize(), self.total_task, '... {} images left.'.format(self.q.qsize()))
upload_file(filepath, **self.params)
self.q.task_done()
def create_dirs(root_path=''):
lib.io.mkdir_p(os.path.join(root_path, "success"))
lib.io.mkdir_p(os.path.join(root_path, "failed"))
def encode_multipart(fields, files, boundary=None):
"""
Encode dict of form fields and dict of files as multipart/form-data.
Return tuple of (body_string, headers_dict). Each value in files is a dict
with required keys 'filename' and 'content', and optional 'mimetype' (if
not specified, tries to guess mime type or uses 'application/octet-stream').
From MIT licensed recipe at
http://code.activestate.com/recipes/578668-encode-multipart-form-data-for-uploading-files-via/
"""
def escape_quote(s):
return s.replace('"', '\\"')
if boundary is None:
boundary = ''.join(random.choice(BOUNDARY_CHARS) for i in range(30))
lines = []
for name, value in fields.items():
lines.extend((
'--{0}'.format(boundary),
'Content-Disposition: form-data; name="{0}"'.format(escape_quote(name)),
'',
str(value),
))
for name, value in files.items():
filename = value['filename']
if 'mimetype' in value:
mimetype = value['mimetype']
else:
mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
lines.extend((
'--{0}'.format(boundary),
'Content-Disposition: form-data; name="{0}"; filename="{1}"'.format(
escape_quote(name), escape_quote(filename)),
'Content-Type: {0}'.format(mimetype),
'',
value['content'],
))
lines.extend((
'--{0}--'.format(boundary),
'',
))
body = '\r\n'.join(lines)
headers = {
'Content-Type': 'multipart/form-data; boundary={0}'.format(boundary),
'Content-Length': str(len(body)),
}
return (body, headers)
def finalize_upload(params, retry=3, auto_done=False):
'''
Finalize and confirm upload
'''
# retry if input is unclear
for i in range(retry):
if not auto_done:
proceed = raw_input("Finalize upload? [y/n]: ")
else:
proceed = "y"
if proceed in ["y", "Y", "yes", "Yes"]:
# upload an empty DONE file
upload_done_file(params)
print("Done uploading.")
break
elif proceed in ["n", "N", "no", "No"]:
print("Aborted. No files were submitted. Try again if you had failures.")
break
else:
if i==2:
print("Aborted. No files were submitted. Try again if you had failures.")
else:
print('Please answer y or n. Try again.')
def get_upload_token(mail, pwd):
'''
Get upload token
'''
params = urllib.urlencode({"email": mail, "password": pwd})
response = urllib.urlopen(LOGIN_URL, params)
resp = json.loads(response.read())
return resp['token']
def get_authentication_info():
'''
Get authentication information from env
'''
try:
MAPILLARY_USERNAME = os.environ['MAPILLARY_USERNAME']
MAPILLARY_EMAIL = os.environ['MAPILLARY_EMAIL']
MAPILLARY_PASSWORD = os.environ['MAPILLARY_PASSWORD']
except KeyError:
return None
return MAPILLARY_USERNAME, MAPILLARY_EMAIL, MAPILLARY_PASSWORD
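# Example environment (sketch; values are placeholders):
#   export MAPILLARY_USERNAME=jane
#   export MAPILLARY_EMAIL=jane@example.com
#   export MAPILLARY_PASSWORD=secret
# get_authentication_info() then returns (username, email, password), or None
# if any of the three variables is missing.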
def get_full_authentication_info(user=None, email=None):
# Fetch full authentication info
try:
MAPILLARY_EMAIL = email if email is not None else os.environ['MAPILLARY_EMAIL']
MAPILLARY_SECRET_HASH = os.environ.get('MAPILLARY_SECRET_HASH', None)
MAPILLARY_UPLOAD_TOKEN = None
if MAPILLARY_SECRET_HASH is None:
MAPILLARY_PASSWORD = os.environ['MAPILLARY_PASSWORD']
MAPILLARY_PERMISSION_HASH = os.environ['MAPILLARY_PERMISSION_HASH']
MAPILLARY_SIGNATURE_HASH = os.environ['MAPILLARY_SIGNATURE_HASH']
MAPILLARY_UPLOAD_TOKEN = get_upload_token(MAPILLARY_EMAIL, MAPILLARY_PASSWORD)
UPLOAD_URL = MAPILLARY_UPLOAD_URL
else:
secret_hash = MAPILLARY_SECRET_HASH
MAPILLARY_PERMISSION_HASH = PERMISSION_HASH
MAPILLARY_SIGNATURE_HASH = SIGNATURE_HASH
UPLOAD_URL = MAPILLARY_DIRECT_UPLOAD_URL
return MAPILLARY_EMAIL, MAPILLARY_UPLOAD_TOKEN, MAPILLARY_SECRET_HASH, UPLOAD_URL
except KeyError:
print("You are missing one of the environment variables MAPILLARY_USERNAME, MAPILLARY_EMAIL, MAPILLARY_PASSWORD, MAPILLARY_PERMISSION_HASH or MAPILLARY_SIGNATURE_HASH. These are required.")
sys.exit()
def get_project_key(project_name, project_key=None):
'''
Get project key given project name
'''
if project_name is not None or project_key is not None:
# Get the JWT token
MAPILLARY_USERNAME, MAPILLARY_EMAIL, MAPILLARY_PASSWORD = get_authentication_info()
params = urllib.urlencode( {"email": MAPILLARY_EMAIL, "password": MAPILLARY_PASSWORD })
resp = json.loads(urllib.urlopen(LOGIN_URL, params).read())
token = resp['token']
# Get the user key
req = urllib2.Request(ME_URL)
req.add_header('Authorization', 'Bearer {}'.format(token))
resp = json.loads(urllib2.urlopen(req).read())
userkey = resp['key']
# Get the user key
req = urllib2.Request(PROJECTS_URL.format(userkey, CLIENT_ID))
req.add_header('Authorization', 'Bearer {}'.format(token))
resp = json.loads(urllib2.urlopen(req).read())
projects = resp
# check projects
found = False
print "Your projects: "
for project in projects:
print(project["name"])
project_name_matched = project['name'].encode('utf-8').decode('utf-8') == project_name
project_key_matched = project["key"] == project_key
if project_name_matched or project_key_matched:
found = True
return project['key']
if not found:
print "Project {} not found.".format(project_name)
return ""
def upload_done_file(params):
print("Upload a DONE file {} to indicate the sequence is all uploaded and ready to submit.".format(params['key']))
if not os.path.exists("DONE"):
open("DONE", 'a').close()
#upload
upload_file("DONE", **params)
#remove
if os.path.exists("DONE"):
os.remove("DONE")
def upload_file(filepath, url, permission, signature, key=None, move_files=True, keep_file_names=True):
'''
Upload file at filepath.
Move to subfolders 'success'/'failed' on completion if move_files is True.
'''
filename = os.path.basename(filepath)
if keep_file_names:
s3_filename = filename
else:
try:
s3_filename = EXIF(filepath).exif_name()
except:
s3_filename = filename
# add S3 'path' if given
if key is None:
s3_key = s3_filename
else:
s3_key = key+s3_filename
parameters = {"key": s3_key, "AWSAccessKeyId": "AKIAI2X3BJAT2W75HILA", "acl": "private",
"policy": permission, "signature": signature, "Content-Type":"image/jpeg" }
with open(filepath, "rb") as f:
encoded_string = f.read()
data, headers = encode_multipart(parameters, {'file': {'filename': filename, 'content': encoded_string}})
root_path = os.path.dirname(filepath)
success_path = os.path.join(root_path, 'success')
failed_path = os.path.join(root_path, 'failed')
lib.io.mkdir_p(success_path)
lib.io.mkdir_p(failed_path)
for attempt in range(MAX_ATTEMPTS):
# Initialize response before each attempt
response = None
try:
request = urllib2.Request(url, data=data, headers=headers)
response = urllib2.urlopen(request)
if response.getcode()==204:
if move_files:
os.rename(filepath, os.path.join(success_path, filename))
# print("Success: {0}".format(filename))
else:
if move_files:
os.rename(filepath, os.path.join(failed_path, filename))
print("Failed: {0}".format(filename))
break # attempts
except urllib2.HTTPError as e:
print("HTTP error: {0} on {1}".format(e, filename))
time.sleep(5)
except urllib2.URLError as e:
print("URL error: {0} on {1}".format(e, filename))
time.sleep(5)
except httplib.HTTPException as e:
print("HTTP exception: {0} on {1}".format(e, filename))
time.sleep(5)
except OSError as e:
print("OS error: {0} on {1}".format(e, filename))
time.sleep(5)
except socket.timeout as e:
# Specific timeout handling for Python 2.7
print("Timeout error: {0} (retrying)".format(filename))
finally:
if response is not None:
response.close()
def upload_file_list(file_list, params=UPLOAD_PARAMS):
# create upload queue with all files
q = Queue()
for filepath in file_list:
q.put(filepath)
# create uploader threads
uploaders = [UploadThread(q, params) for i in range(NUMBER_THREADS)]
# start uploaders as daemon threads that can be stopped (ctrl-c)
try:
print("Uploading with {} threads".format(NUMBER_THREADS))
for uploader in uploaders:
uploader.daemon = True
uploader.start()
for uploader in uploaders:
uploader.join(1)
while q.unfinished_tasks:
time.sleep(1)
q.join()
except (KeyboardInterrupt, SystemExit):
print("\nBREAK: Stopping upload.")
sys.exit()
def upload_summary(file_list, total_uploads, split_groups, duplicate_groups, missing_groups):
total_success = len([f for f in file_list if 'success' in f])
total_failed = len([f for f in file_list if 'failed' in f])
lines = []
if duplicate_groups:
lines.append('Duplicates (skipping):')
lines.append(' groups: {}'.format(len(duplicate_groups)))
lines.append(' total: {}'.format(sum([len(g) for g in duplicate_groups])))
if missing_groups:
lines.append('Missing Required EXIF (skipping):')
lines.append(' total: {}'.format(sum([len(g) for g in missing_groups])))
lines.append('Sequences:')
lines.append(' groups: {}'.format(len(split_groups)))
lines.append(' total: {}'.format(sum([len(g) for g in split_groups])))
lines.append('Uploads:')
lines.append(' total uploads this run: {}'.format(total_uploads))
lines.append(' total: {}'.format(total_success+total_failed))
lines.append(' success: {}'.format(total_success))
lines.append(' failed: {}'.format(total_failed))
lines = '\n'.join(lines)
return lines
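# Example return value of upload_summary (sketch, illustrative numbers):
#   Sequences:
#     groups: 2
#     total: 240
#   Uploads:
#     total uploads this run: 240
#     total: 240
#     success: 238
#     failed: 2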