Merge pull request #169 from developmentseed/develop
v0.12.2
Scisco committed Mar 25, 2016
2 parents 14f14ec + 18120db commit cd100f6
Showing 10 changed files with 197 additions and 191 deletions.
5 changes: 5 additions & 0 deletions CHANGES.txt
@@ -1,6 +1,11 @@
Changes
=======

0.12.2 (2016-02-18)
------------------
- Fix for #167
- Fix for #145

0.12.0 (2016-02-18)
------------------
- Add USGS download fallback closes #89
2 changes: 1 addition & 1 deletion landsat/__init__.py
@@ -1 +1 @@
__version__ = '0.12.1'
__version__ = '0.12.2'
170 changes: 85 additions & 85 deletions landsat/downloader.py
@@ -58,42 +58,49 @@ def download(self, scenes, bands=None):
"""

if isinstance(scenes, list):
output = {}
files = []

for scene in scenes:
# If bands are provided the image is from 2015 or later use Amazon
self.scene_interpreter(scene)

if (bands and int(scene[12]) > 4):
if isinstance(bands, list):
# Create a folder to download the specific bands into
path = check_create_folder(join(self.download_dir, scene))
try:
# Always grab MTL.txt if bands are specified
if 'BQA' not in bands:
bands.append('QA')

if 'MTL' not in bands:
bands.append('MTL')

for band in bands:
self.amazon_s3(scene, band, path)
output[scene] = 'aws'
except RemoteFileDoesntExist:
self.google_storage(scene, self.download_dir)
output[scene] = 'google'

else:
raise Exception('Expected bands list')
else:
self.google_storage(scene, self.download_dir)
output[scene] = 'google'

return output

# for all scenes: if bands are provided, first check AWS and, if the bands
# exist there, download them; otherwise fall back to Google and then USGS.
try:
# if bands are not provided, go directly to Google and then USGS
if not isinstance(bands, list):
raise RemoteFileDoesntExist
files.append(self.amazon_s3(scene, bands))

except RemoteFileDoesntExist:
try:
files.append(self.google_storage(scene, self.download_dir))
except RemoteFileDoesntExist:
files.append(self.usgs_eros(scene, self.download_dir))

return files

else:
raise Exception('Expected sceneIDs list')
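The reworked download() above no longer returns a scene-to-source map; it tries AWS S3 first (only when a band list is given), then Google Storage, then USGS EarthExplorer, and collects the stored paths. A minimal standalone sketch of that fallback order, with hypothetical fetch_from_* stand-ins for the real amazon_s3, google_storage and usgs_eros methods:

```python
class RemoteFileDoesntExist(Exception):
    """Raised when a source does not have the requested scene."""


def fetch_from_aws(scene, bands):
    # hypothetical stand-in for Downloader.amazon_s3; pretend the scene is missing
    raise RemoteFileDoesntExist


def fetch_from_google(scene):
    # hypothetical stand-in for Downloader.google_storage
    return '/tmp/%s.tar.bz' % scene


def fetch_from_usgs(scene):
    # hypothetical stand-in for Downloader.usgs_eros
    return '/tmp/%s.tar.gz' % scene


def download(scenes, bands=None):
    """Mirror of the new control flow: AWS S3 -> Google Storage -> USGS."""
    if not isinstance(scenes, list):
        raise Exception('Expected sceneIDs list')

    files = []
    for scene in scenes:
        try:
            # without a band list there is nothing to pick from S3,
            # so skip straight to the fallbacks
            if not isinstance(bands, list):
                raise RemoteFileDoesntExist
            files.append(fetch_from_aws(scene, bands))
        except RemoteFileDoesntExist:
            try:
                files.append(fetch_from_google(scene))
            except RemoteFileDoesntExist:
                files.append(fetch_from_usgs(scene))
    return files


print(download(['LC80030172015001LGN00'], bands=[4, 3, 2]))
```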

def usgs_eros(self, scene, path):
""" Downloads the image from USGS """

# download from usgs if login information is provided
if self.usgs_user and self.usgs_pass:
try:
api_key = api.login(self.usgs_user, self.usgs_pass)
except USGSError as e:
error_tree = ElementTree.fromstring(str(e.message))
error_text = error_tree.find("SOAP-ENV:Body/SOAP-ENV:Fault/faultstring", api.NAMESPACES).text
raise USGSInventoryAccessMissing(error_text)

download_url = api.download('LANDSAT_8', 'EE', [scene], api_key=api_key)
if download_url:
self.output('Source: USGS EarthExplorer', normal=True, arrow=True)
return self.fetch(download_url[0], path)

raise RemoteFileDoesntExist('%s is not available on AWS S3, Google or USGS Earth Explorer' % scene)
raise RemoteFileDoesntExist('%s is not available on AWS S3 or Google Storage' % scene)
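The new usgs_eros() extracts the human-readable fault string from the SOAP error raised by the usgs client before re-raising it as USGSInventoryAccessMissing. A sketch of that parsing step, using a made-up fault document and an assumed namespace map in place of api.NAMESPACES:

```python
from xml.etree import ElementTree

# assumed shape of the usgs package's api.NAMESPACES mapping
NAMESPACES = {'SOAP-ENV': 'http://schemas.xmlsoap.org/soap/envelope/'}

# made-up fault document standing in for str(e.message) from a USGSError
fault_xml = (
    '<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">'
    '<SOAP-ENV:Body><SOAP-ENV:Fault>'
    '<faultstring>Inventory access is missing for this account</faultstring>'
    '</SOAP-ENV:Fault></SOAP-ENV:Body></SOAP-ENV:Envelope>'
)

error_tree = ElementTree.fromstring(fault_xml)
error_text = error_tree.find('SOAP-ENV:Body/SOAP-ENV:Fault/faultstring', NAMESPACES).text
print(error_text)  # -> Inventory access is missing for this account
```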

def google_storage(self, scene, path):
"""
Google Storage Downloader.
@@ -110,67 +117,49 @@ def google_storage(self, scene, path):
:returns:
Boolean
"""
sat = self.scene_interpreter(scene)

filename = scene + '.tar.bz'
sat = self.scene_interpreter(scene)
url = self.google_storage_url(sat)

if self.remote_file_exists(url):
return self.fetch(url, path, filename)
self.remote_file_exists(url)

else:
# download from usgs if login information is provided
if self.usgs_user and self.usgs_pass:
try:
api_key = api.login(self.usgs_user, self.usgs_pass)
except USGSError as e:
error_tree = ElementTree.fromstring(str(e.message))
error_text = error_tree.find("SOAP-ENV:Body/SOAP-ENV:Fault/faultstring", api.NAMESPACES).text
raise USGSInventoryAccessMissing(error_text)
self.output('Source: Google Storage', normal=True, arrow=True)
return self.fetch(url, path)

download_url = api.download('LANDSAT_8', 'EE', [scene], api_key=api_key)
if download_url:
return self.fetch(download_url[0], path, filename)
def amazon_s3(self, scene, bands):
"""
Amazon S3 downloader
"""

raise RemoteFileDoesntExist('%s is not available on AWS S3, Google or USGS Earth Explorer' % filename)
sat = self.scene_interpreter(scene)

raise RemoteFileDoesntExist('%s is not available on AWS S3 or Google Storage' % filename)
# Always grab MTL.txt and QA band if bands are specified
if 'BQA' not in bands:
bands.append('QA')

def amazon_s3(self, scene, band, path):
"""
Amazon S3 downloader
if 'MTL' not in bands:
bands.append('MTL')

:param scene:
The scene ID.
:type scene:
String
:param band:
The band number.
:type band:
String, Integer
:param path:
The directory path to where the image should be stored
:type path:
String
urls = []

:returns:
Boolean
"""
sat = self.scene_interpreter(scene)
for band in bands:
# get url for the band
url = self.amazon_s3_url(sat, band)

if band != 'MTL':
filename = '%s_B%s.TIF' % (scene, band)
else:
filename = '%s_%s.txt' % (scene, band)
url = self.amazon_s3_url(sat, filename)
# make sure it exists
self.remote_file_exists(url)
urls.append(url)

if self.remote_file_exists(url):
return self.fetch(url, path, filename)
# create folder
path = check_create_folder(join(self.download_dir, scene))

else:
raise RemoteFileDoesntExist('%s is not available on Amazon S3' % filename)
self.output('Source: AWS S3', normal=True, arrow=True)
for url in urls:
self.fetch(url, path)

return path
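The new amazon_s3() above normalises the requested band list before building any URLs: the QA band and the MTL metadata file are always added. A small sketch of that rule (the copy is only to keep the example side-effect free; the real method appends in place):

```python
def normalize_band_list(bands):
    """Mirror of the new behaviour: the QA band and the MTL metadata
    file are always downloaded along with whatever the caller asked for."""
    bands = list(bands)  # copy so the caller's list is untouched
    if 'BQA' not in bands:
        bands.append('QA')
    if 'MTL' not in bands:
        bands.append('MTL')
    return bands


print(normalize_band_list([4, 3, 2]))  # [4, 3, 2, 'QA', 'MTL']
print(normalize_band_list([5]))        # [5, 'QA', 'MTL']
```

Because every URL is checked with remote_file_exists() before the first fetch(), a single missing band sends the whole scene to the Google Storage fallback instead of leaving a half-filled folder behind.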

def fetch(self, url, path, filename):
def fetch(self, url, path):
""" Downloads the given url.
:param url:
@@ -190,18 +179,26 @@ def fetch(self, url, path, filename):
Boolean
"""

segments = url.split('/')
filename = segments[-1]

# remove query parameters from the filename
filename = filename.split('?')[0]

self.output('Downloading: %s' % filename, normal=True, arrow=True)

if exists(join(path, filename)):
size = getsize(join(path, filename))
if size == self.get_remote_file_size(url):
self.output('%s already exists on your system' % filename, normal=True, color='green', indent=1)
return False

fetch(url, path)
else:
fetch(url, path)
self.output('stored at %s' % path, normal=True, color='green', indent=1)

return True
return join(path, filename)
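fetch() no longer takes a filename argument; it derives the local name from the URL itself and strips any query string. A sketch of that derivation, using illustrative URLs:

```python
def filename_from_url(url):
    """Mirror of how the reworked fetch() derives the local file name
    from the URL instead of receiving it as an argument."""
    filename = url.split('/')[-1]
    # drop query parameters (e.g. a signed download link)
    return filename.split('?')[0]


print(filename_from_url(
    'https://landsat-pds.s3.amazonaws.com/L8/003/017/'
    'LC80030172015001LGN00/LC80030172015001LGN00_B4.TIF'))
# LC80030172015001LGN00_B4.TIF

print(filename_from_url(
    'https://example.usgs.gov/download/LC80030172015001LGN00.tar.gz?id=abc123'))
# LC80030172015001LGN00.tar.gz
```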

def google_storage_url(self, sat):
"""
@@ -218,7 +215,7 @@ def google_storage_url(self, sat):
filename = sat['scene'] + '.tar.bz'
return url_builder([self.google, sat['sat'], sat['path'], sat['row'], filename])

def amazon_s3_url(self, sat, filename):
def amazon_s3_url(self, sat, band):
"""
Return an amazon s3 url that contains the scene and band provided.
@@ -234,6 +231,11 @@ def amazon_s3_url(self, sat, filename):
:returns:
(String) The URL to a S3 file
"""
if band != 'MTL':
filename = '%s_B%s.TIF' % (sat['scene'], band)
else:
filename = '%s_%s.txt' % (sat['scene'], band)

return url_builder([self.s3, sat['sat'], sat['path'], sat['row'], sat['scene'], filename])
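The band-to-filename mapping moved from amazon_s3() into amazon_s3_url(), so callers now pass a band identifier rather than a ready-made filename. A sketch of the rule, with an illustrative scene ID:

```python
def band_filename(scene, band):
    """Mirror of the filename rule now applied inside amazon_s3_url():
    bands map to GeoTIFFs, 'MTL' maps to the metadata text file."""
    if band != 'MTL':
        return '%s_B%s.TIF' % (scene, band)
    return '%s_%s.txt' % (scene, band)


scene = 'LC80030172015001LGN00'
print(band_filename(scene, 4))      # LC80030172015001LGN00_B4.TIF
print(band_filename(scene, 'QA'))   # LC80030172015001LGN00_BQA.TIF
print(band_filename(scene, 'MTL'))  # LC80030172015001LGN00_MTL.txt
```

Passing 'QA' therefore produces the ..._BQA.TIF quality-band file, which is the name the 'BQA' check in amazon_s3() refers to.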

def remote_file_exists(self, url):
@@ -249,10 +251,8 @@ def remote_file_exists(self, url):
"""
status = requests.head(url).status_code

if status == 200:
return True
else:
return False
if status != 200:
raise RemoteFileDoesntExist
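remote_file_exists() now raises RemoteFileDoesntExist instead of returning a boolean, which is what lets download() chain its sources with try/except. A sketch of the new contract (the URL is only illustrative):

```python
import requests


class RemoteFileDoesntExist(Exception):
    pass


def remote_file_exists(url):
    """Mirror of the new contract: raise instead of returning False,
    so callers can chain fallbacks with try/except."""
    if requests.head(url).status_code != 200:
        raise RemoteFileDoesntExist


try:
    remote_file_exists('https://httpbin.org/status/404')
except RemoteFileDoesntExist:
    print('falling back to the next download source')
```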

def get_remote_file_size(self, url):
""" Gets the filesize of a remote file.
30 changes: 14 additions & 16 deletions landsat/landsat.py
@@ -324,8 +324,15 @@ def main(args):
s = Search()

try:
lat = float(args.lat) if args.lat else None
lon = float(args.lon) if args.lon else None
if args.lat is not None:
lat = float(args.lat)
else:
lat = None

if args.lon is not None:
lon = float(args.lon)
else:
lon = None
except ValueError:
return ["The latitude and longitude values must be valid numbers", 1]

@@ -392,23 +399,14 @@ def main(args):
if not args.bands:
bands = [4, 3, 2]

downloaded = d.download(args.scenes, bands)
files = d.download(args.scenes, bands)

if args.process:
if not args.bands:
args.bands = '432'
force_unzip = True if args.force_unzip else False
for scene, src in downloaded.iteritems():
if args.dest:
path = join(args.dest, scene)
else:
path = join(settings.DOWNLOAD_DIR, scene)

# Keep using Google if the image is before 2015
if src == 'google':
path = path + '.tar.bz'

stored = process_image(path, args.bands, False, args.pansharpen, args.ndvi, force_unzip,
for f in files:
stored = process_image(f, args.bands, False, args.pansharpen, args.ndvi, force_unzip,
args.ndvigrey, bounds=bounds)
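Because download() now returns the stored paths directly, the processing loop above just walks that list; there is no more per-source path reconstruction or '.tar.bz' suffixing for Google downloads. A sketch of the simplified hand-off, with a hypothetical process_image stand-in:

```python
from os.path import join


def process_image(path, bands):
    # hypothetical stand-in for landsat.landsat.process_image
    return join('/tmp/processed', path.split('/')[-1])


# download() hands back stored paths, whether they point at an AWS band
# folder or a downloaded archive, so the loop is uniform
files = ['/tmp/LC80030172015001LGN00',          # AWS S3: folder of band TIFs
         '/tmp/LC80010092015051LGN00.tar.bz']   # Google/USGS: single archive
for f in files:
    print(process_image(f, bands='432'))
```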

if args.upload:
@@ -467,8 +465,8 @@ def process_image(path, bands=None, verbose=False, pansharpen=False, ndvi=False,
p = Simple(path, bands=bands, dst_path=settings.PROCESSED_IMAGE, verbose=verbose, force_unzip=force_unzip,
bounds=bounds)

except IOError:
exit("Zip file corrupted", 1)
except IOError as e:
exit(e.message, 1)
except FileDoesNotExist as e:
exit(e.message, 1)

2 changes: 1 addition & 1 deletion landsat/search.py
@@ -218,7 +215,7 @@ def query_builder(self, paths_rows=None, lat=None, lon=None, address=None, start

if address:
query.append(self.address_builder(address))
elif lat and lon:
elif (lat is not None) and (lon is not None):
query.append(self.lat_lon_builder(lat, lon))

if query:
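The query_builder change matters for coordinates on the equator or prime meridian: 0.0 is falsy, so the old truthiness test silently dropped a valid lat/lon pair. A minimal illustration:

```python
lat, lon = 0.0, 0.0  # a point on the equator / prime meridian

print(bool(lat and lon))                        # False -> old check skipped the lat/lon query
print((lat is not None) and (lon is not None))  # True  -> new check includes it
```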
2 changes: 1 addition & 1 deletion requirements/docker.txt
@@ -8,4 +8,4 @@ boto>=2.38.0
polyline==1.1
geocoder>=1.5.1
jsonschema==2.5.1
git+git://github.com/developmentseed/usgs@develop
usgs==0.1.9
5 changes: 3 additions & 2 deletions setup.py
@@ -35,7 +35,7 @@ def readme():
license='CCO',
platforms='Posix; MacOS X; Windows',
install_requires=[
'usgs2==0.2.0',
'usgs==0.1.9',
'requests==2.7.0',
'python-dateutil>=2.4.2',
'numpy>=1.9.3',
@@ -47,7 +47,8 @@ def readme():
'homura>=0.1.2',
'boto>=2.38.0',
'polyline==1.1',
'geocoder>=1.5.1'
'geocoder>=1.5.1',
'matplotlib==1.5.1'
],
test_suite='nose.collector',
tests_require=test_requirements