diff --git a/README.md b/README.md index dc8828fad..cf0caaeb8 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ utility in a general sense for building other types of software packages. In its InSAR aspect ISCE supports data from many space-borne satellites and one air-borne platform. We continue to increase the number of sensors supported. At this time the sensors that are supported are the following: ALOS, ALOS2, -COSMO_SKYMED, ENVISAT, ERS, KOMPSAT5, RADARSAT1, RADARSAT2, RISAT1, Sentinel1, +COSMO_SKYMED, ENVISAT, ERS, KOMPSAT5, LUTAN1, RADARSAT1, RADARSAT2, RISAT1, Sentinel1, TERRASARX, UAVSAR and SAOCOM1A. ## Contents diff --git a/applications/make_raw.py b/applications/make_raw.py index a7fad5ee2..b7dfbe8ea 100755 --- a/applications/make_raw.py +++ b/applications/make_raw.py @@ -226,7 +226,7 @@ def calculateSquint(self): self.frame.squintAngle = math.radians(self.squint) def make_raw(self): - from isceobj.Image import createRawImage, createSlcImage + from isceobj.Image import createRawImage, createSlcImage self.activateInputPorts() # Parse the image metadata and extract the image diff --git a/components/isceobj/Alos2Proc/runFrameOffset.py b/components/isceobj/Alos2Proc/runFrameOffset.py index 635e94c9f..59356688d 100644 --- a/components/isceobj/Alos2Proc/runFrameOffset.py +++ b/components/isceobj/Alos2Proc/runFrameOffset.py @@ -97,18 +97,23 @@ def frameOffset(track, image, outputfile, crossCorrelation=True, matchingMode=0) swath1 = track.frames[j-1].swaths[0] swath2 = track.frames[j].swaths[0] + #consider frame/swath azimuth sensing start differences caused by swath mosaicking + # tested with alos2App.py, alos2burstApp.py and alosStack + frame1 = track.frames[j-1] + frame2 = track.frames[j] + delta_az = -((frame2.sensingStart - frame1.sensingStart).total_seconds() - (swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval #offset from geometry offsetGeometrical = computeFrameOffset(swath1, swath2) 
rangeOffsetGeometrical.append(offsetGeometrical[0]) - azimuthOffsetGeometrical.append(offsetGeometrical[1]) + azimuthOffsetGeometrical.append(offsetGeometrical[1]+delta_az) #offset from cross-correlation if crossCorrelation: offsetMatching = estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=matchingMode) if offsetMatching != None: rangeOffsetMatching.append(offsetMatching[0]) - azimuthOffsetMatching.append(offsetMatching[1]) + azimuthOffsetMatching.append(offsetMatching[1]+delta_az) else: print('******************************************************************') print('WARNING: bad matching offset, we are forced to use') diff --git a/components/isceobj/Orbit/ODR.py b/components/isceobj/Orbit/ODR.py index 570f09362..0a5bd5cb3 100644 --- a/components/isceobj/Orbit/ODR.py +++ b/components/isceobj/Orbit/ODR.py @@ -184,15 +184,11 @@ def parse(self): def parseLine(self,line): arc = Arc() - arc.number = line[0:3] # Arc number - arc.start = datetime.datetime.strptime(line[5:17],'%y%m%d %H:%M') # Starting time for the arc - arc.stop = datetime.datetime.strptime(line[20:32],'%y%m%d %H:%M') # End time for the arc - arc.slrResidual = line[34:38] # Satellite laser ranging residual in cm - arc.crossOver = line[39:43] - arc.altimeter = line[45:49] - arc.repeat = line[51:57] # Repeat cycle in days - arc.version = line[58:61] # Version number - arc.precise = datetime.datetime.strptime(line[63:78],'%y%m%d %H:%M:%S') # Starting time of the precise segment of the arc + ###### Change to also work for the new arclist format in the recalculated ODR orbit files.####### + arc.number = line.split()[0] # Arc number + arc.start = datetime.datetime.strptime(" ".join(line.split()[1:3]),'%y%m%d %H:%M') # Starting time for the arc + arc.stop = datetime.datetime.strptime(" ".join(line.split()[4:6]),'%y%m%d %H:%M') # End time for the arc + arc.precise = datetime.datetime.strptime(" ".join(line.split()[-2:]),'%y%m%d %H:%M:%S') # Starting time of the precise segment of the arc return 
arc diff --git a/components/isceobj/Sensor/ALOS2.py b/components/isceobj/Sensor/ALOS2.py index a45d82194..ec1c0c3de 100755 --- a/components/isceobj/Sensor/ALOS2.py +++ b/components/isceobj/Sensor/ALOS2.py @@ -106,7 +106,7 @@ def __init__(self, name=''): self.leaderFile = None self.imageFile = None - #####Soecific doppler functions for ALOS2 + #####Specific doppler functions for ALOS2 self.doppler_coeff = None self.azfmrate_coeff = None self.lineDirection = None diff --git a/components/isceobj/Sensor/CMakeLists.txt b/components/isceobj/Sensor/CMakeLists.txt index 6fef2cb36..0348ecedc 100644 --- a/components/isceobj/Sensor/CMakeLists.txt +++ b/components/isceobj/Sensor/CMakeLists.txt @@ -41,6 +41,7 @@ set(installfiles UAVSAR_RPI.py UAVSAR_Stack.py SAOCOM_SLC.py + Lutan1.py ) if(HDF5_FOUND) diff --git a/components/isceobj/Sensor/Lutan1.py b/components/isceobj/Sensor/Lutan1.py new file mode 100644 index 000000000..ca144852c --- /dev/null +++ b/components/isceobj/Sensor/Lutan1.py @@ -0,0 +1,323 @@ +#!/usr/bin/python3 + +# Reader for Lutan-1 SLC data +# Used Sentinel1.py and ALOS.py as templates +# Author: Bryan Marfito, EOS-RS + + +import os +import glob +import numpy as np +import xml.etree.ElementTree as ET +import datetime +import isce +import isceobj +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Sensor.Sensor import Sensor +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from isceobj.Orbit.OrbitExtender import OrbitExtender +from osgeo import gdal +import warnings + +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + +# Antenna dimensions 9.8 x 3.4 m +antennaLength = 9.8 + +XML = Component.Parameter('xml', + public_name = 'xml', + default = None, + type = str, + doc = 'Input XML file') + + +TIFF = Component.Parameter('tiff', + public_name ='tiff', + 
default = None, + type=str, + doc = 'Input image file') + +ORBIT_FILE = Component.Parameter('orbitFile', + public_name ='orbitFile', + default = None, + type=str, + doc = 'Orbit file') + + +class Lutan1(Sensor): + + "Class for Lutan-1 SLC data" + + family = 'l1sm' + logging_name = 'isce.sensor.Lutan1' + + parameter_list = (TIFF, ORBIT_FILE) + Sensor.parameter_list + + def __init__(self, name = ''): + super(Lutan1,self).__init__(self.__class__.family, name=name) + self.frame = Frame() + self.frame.configure() + self._xml_root = None + self.doppler_coeff = None + + def parse(self): + xmlFileName = self.tiff[:-4] + "meta.xml" + self.xml = xmlFileName + + with open(self.xml, 'r') as fid: + xmlstr = fid.read() + + self._xml_root = ET.fromstring(xmlstr) + self.populateMetadata() + fid.close() + + if self.orbitFile: + # Check if orbit file exists or not + if os.path.isfile(self.orbitFile) == True: + orb = self.extractOrbit() + self.frame.orbit.setOrbitSource(os.path.basename(self.orbitFile)) + else: + pass + else: + warnings.warn("WARNING! No orbit file found. 
Orbit information from the annotation file is used for processing.") + orb = self.extractOrbitFromAnnotation() + self.frame.orbit.setOrbitSource(os.path.basename(self.xml)) + self.frame.orbit.setOrbitSource('Annotation') + + for sv in orb: + self.frame.orbit.addStateVector(sv) + + def convertToDateTime(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + + + def grab_from_xml(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + + def populateMetadata(self): + mission = self.grab_from_xml('generalHeader/mission') + polarization = self.grab_from_xml('productInfo/acquisitionInfo/polarisationMode') + frequency = float(self.grab_from_xml('instrument/radarParameters/centerFrequency')) + passDirection = self.grab_from_xml('productInfo/missionInfo/orbitDirection') + rangePixelSize = float(self.grab_from_xml('productInfo/imageDataInfo/imageRaster/columnSpacing')) + azimuthPixelSize = float(self.grab_from_xml('productInfo/imageDataInfo/imageRaster/rowSpacing')) + rangeSamplingRate = Const.c/(2.0*rangePixelSize) + + prf = float(self.grab_from_xml('instrument/settings/settingRecord/PRF')) + lines = int(self.grab_from_xml('productInfo/imageDataInfo/imageRaster/numberOfRows')) + samples = int(self.grab_from_xml('productInfo/imageDataInfo/imageRaster/numberOfColumns')) + + startingRange = float(self.grab_from_xml('productInfo/sceneInfo/rangeTime/firstPixel'))*Const.c/2.0 + #slantRange = float(self.grab_from_xml('productSpecific/complexImageInfo/')) + incidenceAngle = float(self.grab_from_xml('productInfo/sceneInfo/sceneCenterCoord/incidenceAngle')) + dataStartTime = self.convertToDateTime(self.grab_from_xml('productInfo/sceneInfo/start/timeUTC')) + dataStopTime = self.convertToDateTime(self.grab_from_xml('productInfo/sceneInfo/stop/timeUTC')) + pulseLength = 
float(self.grab_from_xml('processing/processingParameter/rangeCompression/chirps/referenceChirp/pulseLength')) + pulseBandwidth = float(self.grab_from_xml('processing/processingParameter/rangeCompression/chirps/referenceChirp/pulseBandwidth')) + chirpSlope = pulseBandwidth/pulseLength + + if self.grab_from_xml('processing/processingParameter/rangeCompression/chirps/referenceChirp/chirpSlope') == "DOWN": + chirpSlope = -1.0 * chirpSlope + else: + pass + + # Check for satellite's look direction + if self.grab_from_xml('productInfo/acquisitionInfo/lookDirection') == "LEFT": + lookSide = lookMap['LEFT'] + print("Look direction: LEFT") + else: + lookSide = lookMap['RIGHT'] + print("Look direction: RIGHT") + + processingFacility = self.grab_from_xml('productInfo/generationInfo/level1ProcessingFacility') + + # Platform parameters + platform = self.frame.getInstrument().getPlatform() + platform.setPlanet(Planet(pname='Earth')) + platform.setMission(mission) + platform.setPointingDirection(lookSide) + platform.setAntennaLength(antennaLength) + + # Instrument parameters + instrument = self.frame.getInstrument() + instrument.setRadarFrequency(frequency) + instrument.setPulseRepetitionFrequency(prf) + instrument.setPulseLength(pulseLength) + instrument.setChirpSlope(chirpSlope) + instrument.setIncidenceAngle(incidenceAngle) + instrument.setRangePixelSize(rangePixelSize) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setPulseLength(pulseLength) + + # Frame parameters + self.frame.setSensingStart(dataStartTime) + self.frame.setSensingStop(dataStopTime) + self.frame.setProcessingFacility(processingFacility) + + # Two-way travel time + diffTime = DTUtil.timeDeltaToSeconds(dataStopTime - dataStartTime) / 2.0 + sensingMid = dataStartTime + datetime.timedelta(microseconds=int(diffTime*1e6)) + self.frame.setSensingMid(sensingMid) + self.frame.setPassDirection(passDirection) + self.frame.setPolarization(polarization) + self.frame.setStartingRange(startingRange) + 
self.frame.setFarRange(startingRange + (samples - 1) * rangePixelSize) + self.frame.setNumberOfLines(lines) + self.frame.setNumberOfSamples(samples) + + return + + + def extractOrbit(self): + + ''' + Extract orbit information from the orbit file + ''' + + try: + fp = open(self.orbitFile, 'r') + except IOError as strerr: + print("IOError: %s" % strerr) + + _xml_root = ET.ElementTree(file=fp).getroot() + node = _xml_root.find('Data_Block/List_of_OSVs') + + orb = Orbit() + orb.configure() + + # I based the margin on the data that I have. + # Lutan-1 position and velocity sampling frequency is 1 Hz + margin = datetime.timedelta(seconds=1.0) + tstart = self.frame.getSensingStart() - margin + tend = self.frame.getSensingStop() + margin + + for child in node: + timestamp = self.convertToDateTime(child.find('UTC').text) + if (timestamp >= tstart) and (timestamp <= tend): + pos = [] + vel = [] + for tag in ['VX', 'VY', 'VZ']: + vel.append(float(child.find(tag).text)) + + for tag in ['X', 'Y', 'Z']: + pos.append(float(child.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + fp.close() + + return orb + + def extractOrbitFromAnnotation(self): + + ''' + Extract orbit information from xml annotation + WARNING! 
Only use this method if orbit file is not available + ''' + + try: + fp = open(self.xml, 'r') + except IOError as strerr: + print("IOError: %s" % strerr) + + _xml_root = ET.ElementTree(file=fp).getroot() + node = _xml_root.find('platform/orbit') + countNode = len(list(_xml_root.find('platform/orbit'))) + + frameOrbit = Orbit() + frameOrbit.setOrbitSource('Header') + margin = datetime.timedelta(seconds=1.0) + tstart = self.frame.getSensingStart() - margin + tend = self.frame.getSensingStop() + margin + + for k in range(1,countNode): + timestamp = self.convertToDateTime(node.find('stateVec[{}]/timeUTC'.format(k)).text) + if (timestamp >= tstart) and (timestamp <= tend): + pos = [float(node.find('stateVec[{}]/posX'.format(k)).text), float(node.find('stateVec[{}]/posY'.format(k)).text), float(node.find('stateVec[{}]/posZ'.format(k)).text)] + vel = [float(node.find('stateVec[{}]/velX'.format(k)).text), float(node.find('stateVec[{}]/velY'.format(k)).text), float(node.find('stateVec[{}]/velZ'.format(k)).text)] + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + frameOrbit.addStateVector(vec) + + fp.close() + return frameOrbit + + def extractImage(self): + self.parse() + width = self.frame.getNumberOfSamples() + lgth = self.frame.getNumberOfLines() + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + + # Band 1 as real and band 2 as imaginary numbers + # Confirmed by Zhang Yunjun + band1 = src.GetRasterBand(1) + band2 = src.GetRasterBand(2) + cJ = np.complex64(1.0j) + + fid = open(self.output, 'wb') + for ii in range(lgth): + # Combine the real and imaginary to make + # them in to complex numbers + real = band1.ReadAsArray(0,ii,width,1) + imag = band2.ReadAsArray(0,ii,width,1) + # Data becomes np.complex128 after combining them + data = real + (cJ * imag) + data.tofile(fid) + + fid.close() + real = None + imag = None + src = None + band1 = None + band2 = None + + #### + slcImage = isceobj.createSlcImage() + 
slcImage.setByteOrder('l') + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setLength(self.frame.getNumberOfLines()) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + self.frame.setImage(slcImage) + + def extractDoppler(self): + ''' + Set doppler values to zero since the metadata doppler values are unreliable. + Also, the SLC images are zero doppler. + ''' + dop = [0., 0., 0.] + + ####For insarApp + quadratic = {} + quadratic['a'] = dop[0] / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + print("Average doppler: ", dop) + self.frame._dopplerVsPixel = dop + + return quadratic diff --git a/components/isceobj/Sensor/SConscript b/components/isceobj/Sensor/SConscript index 7e493c422..d526e2217 100644 --- a/components/isceobj/Sensor/SConscript +++ b/components/isceobj/Sensor/SConscript @@ -28,7 +28,7 @@ listFiles = ['ALOS.py','CEOS.py','COSMO_SkyMed.py','COSMO_SkyMed_SLC.py', 'UAVSAR_Polsar.py', 'ERS_EnviSAT.py', 'ICEYE_SLC.py', 'ALOS2.py', 'ERS_SLC.py', 'ALOS_SLC.py', 'EnviSAT_SLC.py', 'ERS_EnviSAT_SLC.py', 'SICD_RGZERO.py','UAVSAR_HDF5_SLC.py', - 'SAOCOM_SLC.py','__init__.py'] + 'SAOCOM_SLC.py','__init__.py','Lutan1.py'] helpList,installHelp = envSensor['HELP_BUILDER'](envSensor,'__init__.py',install) envSensor.Install(installHelp,helpList) diff --git a/components/isceobj/Sensor/Sentinel1.py b/components/isceobj/Sensor/Sentinel1.py index 6f3316c4c..ad9379a9f 100755 --- a/components/isceobj/Sensor/Sentinel1.py +++ b/components/isceobj/Sensor/Sentinel1.py @@ -45,6 +45,7 @@ from isceobj.Planet.AstronomicalHandbook import Const from iscesys.Component.Component import Component from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from isceobj.Util import Poly1D import os import glob import numpy as np @@ -359,7 +360,6 @@ def populateMetadata(self): self.frame.setNumberOfLines(lines) 
self.frame.setNumberOfSamples(samples) - self.frame.setPassDirection(passDirection) def extractOrbitFromAnnotation(self): ''' @@ -480,7 +480,6 @@ def extractDoppler(self): self.parse() Extract doppler information as needed by mocomp ''' - from isceobj.Util import Poly1D node = self._xml_root.find('dopplerCentroid/dcEstimateList') @@ -518,7 +517,7 @@ def extractDoppler(self): ###Actual Doppler Polynomial for accurate processing - ###Will be used in roiApp + ###Will be used in stripmapApp pix = np.linspace(0, self.frame.getNumberOfSamples(), num=dpoly._order+2) rngs = self.frame.startingRange + pix * self.frame.getInstrument().getRangePixelSize() evals = dpoly(rngs) diff --git a/components/isceobj/Sensor/__init__.py b/components/isceobj/Sensor/__init__.py index 2a3bc86f4..15927bb0b 100755 --- a/components/isceobj/Sensor/__init__.py +++ b/components/isceobj/Sensor/__init__.py @@ -99,6 +99,7 @@ def factory_template(sat,name=None): createICEYE_SLC = partial(factory_template, 'ICEYE_SLC') createUAVSAR_Hdf5_SLC = partial(factory_template, 'UAVSAR_HDF5_SLC') createSAOCOM_SLC = partial(factory_template, 'SAOCOM_SLC') +createLUTAN1 = partial(factory_template, 'Lutan1') SENSORS = {'ALOS' : createALOS, 'ALOS_SLC' : createALOS_SLC, @@ -125,7 +126,8 @@ def factory_template(sat,name=None): 'SICD_RGZERO' : createSICD_RGZERO, 'ICEYE_SLC' : createICEYE_SLC, 'UAVSAR_HDF5_SLC' : createUAVSAR_Hdf5_SLC, - 'SAOCOM_SLC': createSAOCOM_SLC} + 'SAOCOM_SLC': createSAOCOM_SLC, + 'LUTAN1': createLUTAN1} #These are experimental and can be added in as they become ready # 'JERS': createJERS, diff --git a/components/isceobj/StripmapProc/Factories.py b/components/isceobj/StripmapProc/Factories.py index 244eef22f..6a3642d49 100644 --- a/components/isceobj/StripmapProc/Factories.py +++ b/components/isceobj/StripmapProc/Factories.py @@ -52,7 +52,7 @@ def isRawSensor(sensor): ''' Check if input data is raw / slc. 
''' - if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc']: + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc', 'lutan1']: return False else: return True @@ -63,7 +63,7 @@ def isZeroDopplerSLC(sensor): Check if SLC is zero doppler / native doppler. ''' - if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','envisat_slc','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc']: + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','envisat_slc','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc', 'lutan1']: return True elif sensor.lower() in ['alos_slc', 'uavsar_rpi']: return False @@ -76,7 +76,7 @@ def getDopplerMethod(sensor): Return appropriate doppler method based on user input. 
''' - if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','cosmo_skymed','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc', 'roi_pac']: + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','cosmo_skymed','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc', 'roi_pac','lutan1']: res = 'useDEFAULT' else: res = 'useDOPIQ' diff --git a/components/isceobj/StripmapProc/runTopo.py b/components/isceobj/StripmapProc/runTopo.py index d1e031a63..3f0648817 100755 --- a/components/isceobj/StripmapProc/runTopo.py +++ b/components/isceobj/StripmapProc/runTopo.py @@ -87,10 +87,8 @@ def runTopo(self): # topo.incFilename = os.path.join(info.outdir, 'inc.rdr') # topo.maskFilename = os.path.join(info.outdir, 'mask.rdr') - ####Doppler adjustment dop = [x/1.0 for x in info._dopplerVsPixel] - doppler = Poly2D() doppler.setWidth(topo.width // topo.numberRangeLooks) doppler.setLength(topo.length // topo.numberAzimuthLooks) diff --git a/components/isceobj/Util/src/cfft1d_jpl.F b/components/isceobj/Util/src/cfft1d_jpl.F index 9e34a0f27..ef5ab06cb 100644 --- a/components/isceobj/Util/src/cfft1d_jpl.F +++ b/components/isceobj/Util/src/cfft1d_jpl.F @@ -4,7 +4,7 @@ subroutine cfft1d_jpl(n,c,dir) integer*4 n, dir, ier complex*8 c(*) integer nmax - parameter (nmax = 65536) !32768) + parameter (nmax = 131072) !32768) interface integer(C_INT) function sfftw_import_wisdom_from_filename(filename) bind(C, name='sfftw_import_wisdom_from_filename') use, intrinsic :: iso_c_binding @@ -17,7 +17,7 @@ end function sfftw_import_wisdom_from_filename cc #if defined(HP) || defined(HAVE_C1DFFT) - + real*4 work128k(5*131072/2) real*4 work64k(5*65536/2) real*4 work32k(5*32768/2) real*4 
work16(5*16384/2),work8(5*8192/2),work4(5*4096/2) @@ -31,6 +31,7 @@ end function sfftw_import_wisdom_from_filename if(dir .eq. 0) then + if(n.eq.131072)call c1dfft(c,n,work128k,-3,ier) if(n.eq.65536)call c1dfft(c,n,work64k,-3,ier) if(n.eq.32768)call c1dfft(c,n,work32k,-3,ier) if(n.eq.16384)call c1dfft(c,n,work16,-3,ier) @@ -51,7 +52,7 @@ end function sfftw_import_wisdom_from_filename end if return end if - + if(n.eq.131072)call c1dfft(c,n,work128k,-dir,ier) if(n.eq.65536)call c1dfft(c,n,work64k,-dir,ier) if(n.eq.32768)call c1dfft(c,n,work32k,-dir,ier) if(n.eq.16384)call c1dfft(c,n,work16,-dir,ier) @@ -80,7 +81,7 @@ end function sfftw_import_wisdom_from_filename c NOTE: if above condition changed, also need to update cffts.F c The should be updated in the future to use SCSL routines - + complex*8 work128k(131072+15) complex*8 work64k(65536+15) complex*8 work32k(32768+15),work16k(16384+15) complex*8 work8k(8192+15),work4k(4096+15) @@ -88,7 +89,7 @@ end function sfftw_import_wisdom_from_filename complex*8 work512(512+15),work256(256+15) complex*8 work128(5*128/2),work64(64+15) complex*8 work32(32+15),work16(16+15),work8(8+15) - common /fftwork/work64k,work32k,work16k,work8k,work4k,work2k, + common /fftwork/work128k,work64k,work32k,work16k,work8k,work4k,work2k, & work1k,work512,work256,work128,work64, & work32,work16,work8 @@ -96,6 +97,7 @@ end function sfftw_import_wisdom_from_filename if(dir .eq. 
0) then + if (n.eq.131072) call cfft1di(n,work128k) if (n.eq.65536) call cfft1di(n,work64k) if (n.eq.32768) call cfft1di(n,work32k) if (n.eq.16384) call cfft1di(n,work16k) @@ -113,6 +115,7 @@ end function sfftw_import_wisdom_from_filename return end if + if (n.eq.131072) call cfft1d(dir,n,c,1,work128k) if (n.eq.65536) call cfft1d(dir,n,c,1,work64k) if (n.eq.32768) call cfft1d(dir,n,c,1,work32k) if (n.eq.16384) call cfft1d(dir,n,c,1,work16k) @@ -139,7 +142,7 @@ end function sfftw_import_wisdom_from_filename C Sun WIPSpro Fortran 77 - + complex*8 work128k(4*131072+15) complex*8 work64k(4*65536+15) complex*8 work32k(4*32768+15),work16k(4*16384+15) complex*8 work8k(4*8192+15),work4k(4*4096+15) @@ -147,7 +150,7 @@ end function sfftw_import_wisdom_from_filename complex*8 work512(4*512+15),work256(4*256+15) complex*8 work128(4*128+15),work64(4*64+15) complex*8 work32(4*32+15),work16(4*16+15),work8(4*8+15) - common /fftwork/work64k,work32k,work16k,work8k,work4k,work2k, + common /fftwork/work128k,work64k,work32k,work16k,work8k,work4k,work2k, & work1k,work512,work256,work128,work64, & work32,work16,work8 @@ -155,6 +158,7 @@ end function sfftw_import_wisdom_from_filename if(dir .eq. 0) then + if (n.eq.131072) call cffti(n,work128k) if (n.eq.65536) call cffti(n,work64k) if (n.eq.32768) call cffti(n,work32k) if (n.eq.16384) call cffti(n,work16k) @@ -171,6 +175,7 @@ end function sfftw_import_wisdom_from_filename if (n.eq. 
8) call cffti(n,work8) return end if + if (n.eq.131072) call cfftf(n,c,work128k) if (n.eq.65536) call cfftf(n,c,work64k) if (n.eq.32768) call cfftf(n,c,work32k) if (n.eq.16384) call cfftf(n,c,work16k) @@ -231,7 +236,7 @@ end function sfftw_import_wisdom_from_filename if(dir.eq.0)then !c move i calculation to top of function - do i=2,16 + do i=2,17 if(2**i.eq.n)go to 1 end do write(6,*) 'fftw: length unsupported:: ',n @@ -263,6 +268,7 @@ end function sfftw_import_wisdom_from_filename if(n.eq.16384)call sfftw_execute_dft(planf(14),c,c) if(n.eq.32768)call sfftw_execute_dft(planf(15),c,c) if(n.eq.65536)call sfftw_execute_dft(planf(16),c,c) + if(n.eq.131072)call sfftw_execute_dft(planf(17),c,c) end if if(dir.eq. 1)then if(n.eq.4)call sfftw_execute_dft(plani(2),c,c) @@ -280,9 +286,10 @@ end function sfftw_import_wisdom_from_filename if(n.eq.16384)call sfftw_execute_dft(plani(14),c,c) if(n.eq.32768)call sfftw_execute_dft(plani(15),c,c) if(n.eq.65536)call sfftw_execute_dft(plani(16),c,c) + if(n.eq.131072)call sfftw_execute_dft(plani(17),c,c) end if if(dir .eq. 
2) then - do i=2,16 + do i=2,17 if(2**i.eq.n)go to 10 end do write(6,*) 'fftw: length unsupported:: ',n diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c b/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c index 31268e21a..f3d00144a 100644 --- a/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c @@ -34,7 +34,7 @@ #include "ALOS_fbd2fbsmodule.h" -const* __doc__ = "Python extension for ALOS_fbd2fbs.c"; +const char* __doc__ = "Python extension for ALOS_fbd2fbs.c"; static struct PyModuleDef moduledef = { // header diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c b/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c index 6f97d2c17..66c655297 100644 --- a/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c @@ -34,7 +34,7 @@ #include "ALOS_fbs2fbdmodule.h" -const* __doc__ = "Python extension for ALOS_fbs2fbd.c"; +const char* __doc__ = "Python extension for ALOS_fbs2fbd.c"; static struct PyModuleDef moduledef = { // header diff --git a/contrib/PyCuAmpcor/examples/cuDenseOffsets.py b/contrib/PyCuAmpcor/examples/cuDenseOffsets.py index 0e6795828..2b0ddd577 100755 --- a/contrib/PyCuAmpcor/examples/cuDenseOffsets.py +++ b/contrib/PyCuAmpcor/examples/cuDenseOffsets.py @@ -398,7 +398,7 @@ def estimateOffsetField(reference, secondary, inps=None): def prepareGeometry(full_dir, out_dir, x_start, y_start, x_step, y_step, x_win_num, y_win_num, - fbases=['hgt','lat','lon','los','shadowMask','waterMask']): + fbases=['lat','lon','los','hgt','z','shadowMask','waterMask']): """Generate multilooked geometry datasets in the same grid as the estimated offset field from the full resolution geometry datasets. 
Parameters: full_dir - str, path of input geometry directory in full resolution diff --git a/contrib/Snaphu/include/snaphu.h b/contrib/Snaphu/include/snaphu.h index 99fc3f902..3c4edabb3 100644 --- a/contrib/Snaphu/include/snaphu.h +++ b/contrib/Snaphu/include/snaphu.h @@ -141,11 +141,11 @@ #define DEF_NLOOKSAZ 5 #define DEF_NLOOKSOTHER 1 #define DEF_NCORRLOOKS 23.8 -#define DEF_NCORRLOOKSRANGE 3 +#define DEF_NCORRLOOKSRANGE 3 #define DEF_NCORRLOOKSAZ 15 #define DEF_NEARRANGE 831000.0 #define DEF_DR 8.0 -#define DEF_DA 20.0 +#define DEF_DA 20.0 #define DEF_RANGERES 10.0 #define DEF_AZRES 6.0 #define DEF_LAMBDA 0.0565647 @@ -158,7 +158,7 @@ #define DEF_DZRCRITFACTOR 2.0 #define DEF_SHADOW FALSE #define DEF_DZEIMIN -4.0 -#define DEF_LAYWIDTH 16 +#define DEF_LAYWIDTH 16 #define DEF_LAYMINEI 1.25 #define DEF_SLOPERATIOFACTOR 1.18 #define DEF_SIGSQEI 100.0 @@ -180,8 +180,8 @@ #define DEF_DZLAYPEAK -2.0 #define DEF_AZDZFACTOR 0.99 -#define DEF_DZEIFACTOR 4.0 -#define DEF_DZEIWEIGHT 0.5 +#define DEF_DZEIFACTOR 4.0 +#define DEF_DZEIWEIGHT 0.5 #define DEF_DZLAYFACTOR 1.0 #define DEF_LAYCONST 0.9 #define DEF_LAYFALLOFFCONST 2.0 @@ -210,8 +210,8 @@ #define DEF_INITDZR 2048.0 #define DEF_INITDZSTEP 100.0 #define DEF_MAXCOST 1000.0 -#define DEF_COSTSCALE 100.0 -#define DEF_COSTSCALEAMBIGHT 80.0 +#define DEF_COSTSCALE 100.0 +#define DEF_COSTSCALEAMBIGHT 80.0 #define DEF_DNOMINCANGLE 0.01 #define DEF_SRCROW -1 #define DEF_SRCCOL -1 @@ -479,12 +479,12 @@ typedef struct paramST{ signed char transmitmode; /* transmit mode (PINGPONG or SINGLEANTTRANSMIT) */ double baseline; /* baseline length (meters, always postive) */ double baselineangle; /* baseline angle above horizontal (rad) */ - long nlooksrange; /* number of looks in range for input data */ - long nlooksaz; /* number of looks in azimuth for input data */ - long nlooksother; /* number of nonspatial looks for input data */ + long nlooksrange; /* number of looks in range for input data */ + long nlooksaz; /* number of looks in 
azimuth for input data */ + long nlooksother; /* number of nonspatial looks for input data */ double ncorrlooks; /* number of independent looks in correlation est */ - long ncorrlooksrange; /* number of looks in range for correlation */ - long ncorrlooksaz; /* number of looks in azimuth for correlation */ + long ncorrlooksrange; /* number of looks in range for correlation */ + long ncorrlooksaz; /* number of looks in azimuth for correlation */ double nearrange; /* slant range to near part of swath (meters) */ double dr; /* range bin spacing (meters) */ double da; /* azimuth bin spacing (meters) */ @@ -585,7 +585,7 @@ typedef struct paramST{ long conncompthresh; /* cost threshold for connected component */ long maxncomps; /* max number of connected components */ - + }paramT; @@ -650,186 +650,186 @@ typedef double totalcostT; /* typedef long long totalcostT; */ /* functions in snaphu.c */ -void Unwrap(infileT *infiles, outfileT *outfiles, paramT *params, +void Unwrap(infileT *infiles, outfileT *outfiles, paramT *params, long linelen, long nlines); -void UnwrapTile(infileT *infiles, outfileT *outfiles, paramT *params, +void UnwrapTile(infileT *infiles, outfileT *outfiles, paramT *params, tileparamT *tileparams, long nlines, long linelen); /* functions in snaphu_tile.c */ -void SetupTile(long nlines, long linelen, paramT *params, - tileparamT *tileparams, outfileT *outfiles, +void SetupTile(long nlines, long linelen, paramT *params, + tileparamT *tileparams, outfileT *outfiles, outfileT *tileoutfiles, long tilerow, long tilecol); -void GrowRegions(void **costs, short **flows, long nrow, long ncol, +void GrowRegions(void **costs, short **flows, long nrow, long ncol, incrcostT **incrcosts, outfileT *outfiles, paramT *params); -void GrowConnCompsMask(void **costs, short **flows, long nrow, long ncol, - incrcostT **incrcosts, outfileT *outfiles, +void GrowConnCompsMask(void **costs, short **flows, long nrow, long ncol, + incrcostT **incrcosts, outfileT *outfiles, paramT 
*params); long ThickenCosts(incrcostT **incrcosts, long nrow, long ncol); -nodeT *RegionsNeighborNode(nodeT *node1, long *arcnumptr, nodeT **nodes, - long *arcrowptr, long *arccolptr, +nodeT *RegionsNeighborNode(nodeT *node1, long *arcnumptr, nodeT **nodes, + long *arcrowptr, long *arccolptr, long nrow, long ncol); void ClearBuckets(bucketT *bkts); -void MergeRegions(nodeT **nodes, nodeT *source, long *regionsizes, +void MergeRegions(nodeT **nodes, nodeT *source, long *regionsizes, long closestregion, long nrow, long ncol); -void RenumberRegion(nodeT **nodes, nodeT *source, long newnum, +void RenumberRegion(nodeT **nodes, nodeT *source, long newnum, long nrow, long ncol); -void AssembleTiles(outfileT *outfiles, paramT *params, +void AssembleTiles(outfileT *outfiles, paramT *params, long nlines, long linelen); void ReadNextRegion(long tilerow, long tilecol, long nlines, long linelen, - outfileT *outfiles, paramT *params, + outfileT *outfiles, paramT *params, short ***nextregionsptr, float ***nextunwphaseptr, - void ***nextcostsptr, + void ***nextcostsptr, long *nextnrowptr, long *nextncolptr); -void SetTileReadParams(tileparamT *tileparams, long nexttilenlines, - long nexttilelinelen, long tilerow, long tilecol, +void SetTileReadParams(tileparamT *tileparams, long nexttilenlines, + long nexttilelinelen, long tilerow, long tilecol, long nlines, long linelen, paramT *params); -void ReadEdgesAboveAndBelow(long tilerow, long tilecol, long nlines, - long linelen, paramT *params, outfileT *outfiles, +void ReadEdgesAboveAndBelow(long tilerow, long tilecol, long nlines, + long linelen, paramT *params, outfileT *outfiles, short *regionsabove, short *regionsbelow, float *unwphaseabove, float *unwphasebelow, void *costsabove, void *costsbelow); -void TraceRegions(short **regions, short **nextregions, short **lastregions, - short *regionsabove, short *regionsbelow, float **unwphase, - float **nextunwphase, float **lastunwphase, - float *unwphaseabove, float *unwphasebelow, void 
**costs, - void **nextcosts, void **lastcosts, void *costsabove, +void TraceRegions(short **regions, short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, float **unwphase, + float **nextunwphase, float **lastunwphase, + float *unwphaseabove, float *unwphasebelow, void **costs, + void **nextcosts, void **lastcosts, void *costsabove, void *costsbelow, long prevnrow, long prevncol, long tilerow, long tilecol, long nrow, long ncol, nodeT **scndrynodes, - nodesuppT **nodesupp, scndryarcT **scndryarcs, - long ***scndrycosts, short *nscndrynodes, - short *nscndryarcs, long *totarclens, short **bulkoffsets, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long ***scndrycosts, short *nscndrynodes, + short *nscndryarcs, long *totarclens, short **bulkoffsets, paramT *params); -long FindNumPathsOut(nodeT *from, paramT *params, long tilerow, long tilecol, - long nnrow, long nncol, short **regions, +long FindNumPathsOut(nodeT *from, paramT *params, long tilerow, long tilecol, + long nnrow, long nncol, short **regions, short **nextregions, short **lastregions, short *regionsabove, short *regionsbelow, long prevncol); -void RegionTraceCheckNeighbors(nodeT *from, nodeT **nextnodeptr, - nodeT **primarynodes, short **regions, - short **nextregions, short **lastregions, - short *regionsabove, short *regionsbelow, - long tilerow, long tilecol, long nnrow, - long nncol, nodeT **scndrynodes, - nodesuppT **nodesupp, scndryarcT **scndryarcs, - long *nnewnodesptr, long *nnewarcsptr, - long flowmax, long nrow, long ncol, - long prevnrow, long prevncol, paramT *params, - void **costs, void **rightedgecosts, - void **loweredgecosts, void **leftedgecosts, - void **upperedgecosts, short **flows, +void RegionTraceCheckNeighbors(nodeT *from, nodeT **nextnodeptr, + nodeT **primarynodes, short **regions, + short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, + long tilerow, long tilecol, long nnrow, + long nncol, nodeT 
**scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long *nnewnodesptr, long *nnewarcsptr, + long flowmax, long nrow, long ncol, + long prevnrow, long prevncol, paramT *params, + void **costs, void **rightedgecosts, + void **loweredgecosts, void **leftedgecosts, + void **upperedgecosts, short **flows, short **rightedgeflows, short **loweredgeflows, short **leftedgeflows, short **upperedgeflows, - long ***scndrycosts, - nodeT ***updatednontilenodesptr, - long *nupdatednontilenodesptr, + long ***scndrycosts, + nodeT ***updatednontilenodesptr, + long *nupdatednontilenodesptr, long *updatednontilenodesizeptr, - short **inontilenodeoutarcptr, + short **inontilenodeoutarcptr, long *totarclenptr); -void SetUpperEdge(long ncol, long tilerow, long tilecol, void **voidcosts, - void *voidcostsabove, float **unwphase, - float *unwphaseabove, void **voidupperedgecosts, +void SetUpperEdge(long ncol, long tilerow, long tilecol, void **voidcosts, + void *voidcostsabove, float **unwphase, + float *unwphaseabove, void **voidupperedgecosts, short **upperedgeflows, paramT *params, short **bulkoffsets); -void SetLowerEdge(long nrow, long ncol, long tilerow, long tilecol, - void **voidcosts, void *voidcostsbelow, - float **unwphase, float *unwphasebelow, - void **voidloweredgecosts, short **loweredgeflows, +void SetLowerEdge(long nrow, long ncol, long tilerow, long tilecol, + void **voidcosts, void *voidcostsbelow, + float **unwphase, float *unwphasebelow, + void **voidloweredgecosts, short **loweredgeflows, paramT *params, short **bulkoffsets); -void SetLeftEdge(long nrow, long prevncol, long tilerow, long tilecol, - void **voidcosts, void **voidlastcosts, float **unwphase, - float **lastunwphase, void **voidleftedgecosts, +void SetLeftEdge(long nrow, long prevncol, long tilerow, long tilecol, + void **voidcosts, void **voidlastcosts, float **unwphase, + float **lastunwphase, void **voidleftedgecosts, short **leftedgeflows, paramT *params, short **bulkoffsets); -void 
SetRightEdge(long nrow, long ncol, long tilerow, long tilecol, - void **voidcosts, void **voidnextcosts, - float **unwphase, float **nextunwphase, - void **voidrightedgecosts, short **rightedgeflows, +void SetRightEdge(long nrow, long ncol, long tilerow, long tilecol, + void **voidcosts, void **voidnextcosts, + float **unwphase, float **nextunwphase, + void **voidrightedgecosts, short **rightedgeflows, paramT *params, short **bulkoffsets); -void TraceSecondaryArc(nodeT *primaryhead, nodeT **scndrynodes, - nodesuppT **nodesupp, scndryarcT **scndryarcs, - long ***scndrycosts, long *nnewnodesptr, - long *nnewarcsptr, long tilerow, long tilecol, - long flowmax, long nrow, long ncol, - long prevnrow, long prevncol, paramT *params, - void **tilecosts, void **rightedgecosts, +void TraceSecondaryArc(nodeT *primaryhead, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long ***scndrycosts, long *nnewnodesptr, + long *nnewarcsptr, long tilerow, long tilecol, + long flowmax, long nrow, long ncol, + long prevnrow, long prevncol, paramT *params, + void **tilecosts, void **rightedgecosts, void **loweredgecosts, void **leftedgecosts, - void **upperedgecosts, short **tileflows, - short **rightedgeflows, short **loweredgeflows, + void **upperedgecosts, short **tileflows, + short **rightedgeflows, short **loweredgeflows, short **leftedgeflows, short **upperedgeflows, - nodeT ***updatednontilenodesptr, - long *nupdatednontilenodesptr, + nodeT ***updatednontilenodesptr, + long *nupdatednontilenodesptr, long *updatednontilenodesizeptr, short **inontilenodeoutarcptr, long *totarclenptr); -nodeT *FindScndryNode(nodeT **scndrynodes, nodesuppT **nodesupp, +nodeT *FindScndryNode(nodeT **scndrynodes, nodesuppT **nodesupp, long tilenum, long primaryrow, long primarycol); -void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, - nodesuppT **nodesupp, scndryarcT **scndryarcs, - short *nscndryarcs, short **scndryflows, - short **bulkoffsets, outfileT 
*outfiles, +void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + short *nscndryarcs, short **scndryflows, + short **bulkoffsets, outfileT *outfiles, paramT *params); -void ParseSecondaryFlows(long tilenum, short *nscndryarcs, short **tileflows, - short **regions, short **scndryflows, - nodesuppT **nodesupp, scndryarcT **scndryarcs, +void ParseSecondaryFlows(long tilenum, short *nscndryarcs, short **tileflows, + short **regions, short **scndryflows, + nodesuppT **nodesupp, scndryarcT **scndryarcs, long nrow, long ncol, long ntilerow, long ntilecol, paramT *params); /* functions in snaphu_solver.c */ -long TreeSolve(nodeT **nodes, nodesuppT **nodesupp, nodeT *ground, - nodeT *source, candidateT **candidatelistptr, +long TreeSolve(nodeT **nodes, nodesuppT **nodesupp, nodeT *ground, + nodeT *source, candidateT **candidatelistptr, candidateT **candidatebagptr, long *candidatelistsizeptr, - long *candidatebagsizeptr, bucketT *bkts, short **flows, - void **costs, incrcostT **incrcosts, nodeT ***apexes, - signed char **iscandidate, long ngroundarcs, long nflow, - float **mag, float **wrappedphase, char *outfile, - long nnoderow, short *nnodesperrow, long narcrow, + long *candidatebagsizeptr, bucketT *bkts, short **flows, + void **costs, incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long ngroundarcs, long nflow, + float **mag, float **wrappedphase, char *outfile, + long nnoderow, short *nnodesperrow, long narcrow, short *narcsperrow, long nrow, long ncol, outfileT *outfiles, paramT *params); -void AddNewNode(nodeT *from, nodeT *to, long arcdir, bucketT *bkts, - long nflow, incrcostT **incrcosts, long arcrow, long arccol, +void AddNewNode(nodeT *from, nodeT *to, long arcdir, bucketT *bkts, + long nflow, incrcostT **incrcosts, long arcrow, long arccol, paramT *params); -void CheckArcReducedCost(nodeT *from, nodeT *to, nodeT *apex, - long arcrow, long arccol, long arcdir, - long 
nflow, nodeT **nodes, nodeT *ground, - candidateT **candidatebagptr, - long *candidatebagnextptr, - long *candidatebagsizeptr, incrcostT **incrcosts, +void CheckArcReducedCost(nodeT *from, nodeT *to, nodeT *apex, + long arcrow, long arccol, long arcdir, + long nflow, nodeT **nodes, nodeT *ground, + candidateT **candidatebagptr, + long *candidatebagnextptr, + long *candidatebagsizeptr, incrcostT **incrcosts, signed char **iscandidate, paramT *params); -long InitTree(nodeT *source, nodeT **nodes, nodesuppT **nodesupp, - nodeT *ground, long ngroundarcs, bucketT *bkts, long nflow, - incrcostT **incrcosts, nodeT ***apexes, - signed char **iscandidate, long nnoderow, short *nnodesperrow, - long narcrow, short *narcsperrow, long nrow, long ncol, +long InitTree(nodeT *source, nodeT **nodes, nodesuppT **nodesupp, + nodeT *ground, long ngroundarcs, bucketT *bkts, long nflow, + incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long nnoderow, short *nnodesperrow, + long narcrow, short *narcsperrow, long nrow, long ncol, paramT *params); nodeT *FindApex(nodeT *from, nodeT *to); int CandidateCompare(const void *c1, const void *c2); nodeT *NeighborNodeGrid(nodeT *node1, long arcnum, long *upperarcnumptr, - nodeT **nodes, nodeT *ground, long *arcrowptr, - long *arccolptr, long *arcdirptr, long nrow, + nodeT **nodes, nodeT *ground, long *arcrowptr, + long *arccolptr, long *arcdirptr, long nrow, long ncol, nodesuppT **nodesupp); nodeT *NeighborNodeNonGrid(nodeT *node1, long arcnum, long *upperarcnumptr, - nodeT **nodes, nodeT *ground, long *arcrowptr, - long *arccolptr, long *arcdirptr, long nrow, + nodeT **nodes, nodeT *ground, long *arcrowptr, + long *arccolptr, long *arcdirptr, long nrow, long ncol, nodesuppT **nodesupp); -void GetArcGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, +void GetArcGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, long *arcdir, long nrow, long ncol, nodesuppT **nodesupp); -void GetArcNonGrid(nodeT *from, nodeT *to, 
long *arcrow, long *arccol, +void GetArcNonGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, long *arcdir, long nrow, long ncol, nodesuppT **nodesupp); -void NonDegenUpdateChildren(nodeT *startnode, nodeT *lastnode, - nodeT *nextonpath, long dgroup, +void NonDegenUpdateChildren(nodeT *startnode, nodeT *lastnode, + nodeT *nextonpath, long dgroup, long ngroundarcs, long nflow, nodeT **nodes, - nodesuppT **nodesupp, nodeT *ground, - nodeT ***apexes, incrcostT **incrcosts, + nodesuppT **nodesupp, nodeT *ground, + nodeT ***apexes, incrcostT **incrcosts, long nrow, long ncol, paramT *params); -void InitNetwork(short **flows, long *ngroundarcsptr, long *ncycleptr, - long *nflowdoneptr, long *mostflowptr, long *nflowptr, - long *candidatebagsizeptr, candidateT **candidatebagptr, - long *candidatelistsizeptr, candidateT **candidatelistptr, - signed char ***iscandidateptr, nodeT ****apexesptr, - bucketT **bktsptr, long *iincrcostfileptr, - incrcostT ***incrcostsptr, nodeT ***nodesptr, nodeT *ground, +void InitNetwork(short **flows, long *ngroundarcsptr, long *ncycleptr, + long *nflowdoneptr, long *mostflowptr, long *nflowptr, + long *candidatebagsizeptr, candidateT **candidatebagptr, + long *candidatelistsizeptr, candidateT **candidatelistptr, + signed char ***iscandidateptr, nodeT ****apexesptr, + bucketT **bktsptr, long *iincrcostfileptr, + incrcostT ***incrcostsptr, nodeT ***nodesptr, nodeT *ground, long *nnoderowptr, short **nnodesperrowptr, long *narcrowptr, - short **narcsperrowptr, long nrow, long ncol, + short **narcsperrowptr, long nrow, long ncol, signed char *notfirstloopptr, totalcostT *totalcostptr, paramT *params); void InitNodeNums(long nrow, long ncol, nodeT **nodes, nodeT *ground); @@ -840,104 +840,104 @@ void BucketRemove(nodeT *node, long ind, bucketT *bkts); nodeT *ClosestNode(bucketT *bkts); nodeT *ClosestNodeCircular(bucketT *bkts); nodeT *MinOutCostNode(bucketT *bkts); -nodeT *SelectSource(nodeT **nodes, nodeT *ground, long nflow, - short 
**flows, long ngroundarcs, +nodeT *SelectSource(nodeT **nodes, nodeT *ground, long nflow, + short **flows, long ngroundarcs, long nrow, long ncol, paramT *params); -short GetCost(incrcostT **incrcosts, long arcrow, long arccol, +short GetCost(incrcostT **incrcosts, long arcrow, long arccol, long arcdir); -long ReCalcCost(void **costs, incrcostT **incrcosts, long flow, - long arcrow, long arccol, long nflow, long nrow, +long ReCalcCost(void **costs, incrcostT **incrcosts, long flow, + long arcrow, long arccol, long nflow, long nrow, paramT *params); void SetupIncrFlowCosts(void **costs, incrcostT **incrcosts, short **flows, - long nflow, long nrow, long narcrow, + long nflow, long nrow, long narcrow, short *narcsperrow, paramT *params); totalcostT EvaluateTotalCost(void **costs, short **flows, long nrow, long ncol, short *narcsperrow,paramT *params); -void MSTInitFlows(float **wrappedphase, short ***flowsptr, - short **mstcosts, long nrow, long ncol, +void MSTInitFlows(float **wrappedphase, short ***flowsptr, + short **mstcosts, long nrow, long ncol, nodeT ***nodes, nodeT *ground, long maxflow); -void SolveMST(nodeT **nodes, nodeT *source, nodeT *ground, - bucketT *bkts, short **mstcosts, signed char **residue, +void SolveMST(nodeT **nodes, nodeT *source, nodeT *ground, + bucketT *bkts, short **mstcosts, signed char **residue, signed char **arcstatus, long nrow, long ncol); long DischargeTree(nodeT *source, short **mstcosts, short **flows, - signed char **residue, signed char **arcstatus, + signed char **residue, signed char **arcstatus, nodeT **nodes, nodeT *ground, long nrow, long ncol); -signed char ClipFlow(signed char **residue, short **flows, - short **mstcosts, long nrow, long ncol, +signed char ClipFlow(signed char **residue, short **flows, + short **mstcosts, long nrow, long ncol, long maxflow); -void MCFInitFlows(float **wrappedphase, short ***flowsptr, short **mstcosts, +void MCFInitFlows(float **wrappedphase, short ***flowsptr, short **mstcosts, long 
nrow, long ncol, long cs2scalefactor); /* functions in snaphu_cost.c */ -void BuildCostArrays(void ***costsptr, short ***mstcostsptr, - float **mag, float **wrappedphase, - float **unwrappedest, long linelen, long nlines, - long nrow, long ncol, paramT *params, - tileparamT *tileparams, infileT *infiles, +void BuildCostArrays(void ***costsptr, short ***mstcostsptr, + float **mag, float **wrappedphase, + float **unwrappedest, long linelen, long nlines, + long nrow, long ncol, paramT *params, + tileparamT *tileparams, infileT *infiles, outfileT *outfiles); -void **BuildStatCostsTopo(float **wrappedphase, float **mag, - float **unwrappedest, float **pwr, +void **BuildStatCostsTopo(float **wrappedphase, float **mag, + float **unwrappedest, float **pwr, float **corr, short **rowweight, short **colweight, - long nrow, long ncol, tileparamT *tileparams, + long nrow, long ncol, tileparamT *tileparams, outfileT *outfiles, paramT *params); -void **BuildStatCostsDefo(float **wrappedphase, float **mag, - float **unwrappedest, float **corr, +void **BuildStatCostsDefo(float **wrappedphase, float **mag, + float **unwrappedest, float **corr, short **rowweight, short **colweight, - long nrow, long ncol, tileparamT *tileparams, + long nrow, long ncol, tileparamT *tileparams, outfileT *outfiles, paramT *params); -void **BuildStatCostsSmooth(float **wrappedphase, float **mag, - float **unwrappedest, float **corr, +void **BuildStatCostsSmooth(float **wrappedphase, float **mag, + float **unwrappedest, float **corr, short **rowweight, short **colweight, - long nrow, long ncol, tileparamT *tileparams, + long nrow, long ncol, tileparamT *tileparams, outfileT *outfiles, paramT *params); -void GetIntensityAndCorrelation(float **mag, float **wrappedphase, - float ***pwrptr, float ***corrptr, +void GetIntensityAndCorrelation(float **mag, float **wrappedphase, + float ***pwrptr, float ***corrptr, infileT *infiles, long linelen, long nlines, - long nrow, long ncol, outfileT *outfiles, + long 
nrow, long ncol, outfileT *outfiles, paramT *params, tileparamT *tileparams); -void RemoveMean(float **ei, long nrow, long ncol, +void RemoveMean(float **ei, long nrow, long ncol, long krowei, long kcolei); -float *BuildDZRCritLookupTable(double *nominc0ptr, double *dnomincptr, +float *BuildDZRCritLookupTable(double *nominc0ptr, double *dnomincptr, long *tablesizeptr, tileparamT *tileparams, paramT *params); -double SolveDZRCrit(double sinnomincangle, double cosnomincangle, +double SolveDZRCrit(double sinnomincangle, double cosnomincangle, paramT *params, double threshold); -void SolveEIModelParams(double *slope1ptr, double *slope2ptr, - double *const1ptr, double *const2ptr, - double dzrcrit, double dzr0, double sinnomincangle, +void SolveEIModelParams(double *slope1ptr, double *slope2ptr, + double *const1ptr, double *const2ptr, + double dzrcrit, double dzr0, double sinnomincangle, double cosnomincangle, paramT *params); double EIofDZR(double dzr, double sinnomincangle, double cosnomincangle, paramT *params); -float **BuildDZRhoMaxLookupTable(double nominc0, double dnominc, - long nominctablesize, double rhomin, +float **BuildDZRhoMaxLookupTable(double nominc0, double dnominc, + long nominctablesize, double rhomin, double drho, long nrho, paramT *params); -double CalcDZRhoMax(double rho, double nominc, paramT *params, +double CalcDZRhoMax(double rho, double nominc, paramT *params, double threshold); -void CalcCostTopo(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostTopo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostDefo(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostDefo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostSmooth(void **costs, long flow, long arcrow, 
long arccol, - long nflow, long nrow, paramT *params, +void CalcCostSmooth(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostL0(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostL0(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostL1(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostL1(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostL2(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostL2(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostLP(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostLP(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); -void CalcCostNonGrid(void **costs, long flow, long arcrow, long arccol, - long nflow, long nrow, paramT *params, +void CalcCostNonGrid(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, long *poscostptr, long *negcostptr); long EvalCostTopo(void **costs, short **flows, long arcrow, long arccol, long nrow, paramT *params); @@ -953,7 +953,7 @@ long EvalCostL2(void **costs, short **flows, long arcrow, long arccol, long nrow, paramT *params); long EvalCostLP(void **costs, short **flows, long arcrow, long arccol, long nrow, paramT *params); -long EvalCostNonGrid(void **costs, short **flows, long arcrow, long arccol, +long EvalCostNonGrid(void **costs, short **flows, long arcrow, long arccol, long nrow, paramT *params); void 
CalcInitMaxFlow(paramT *params, void **costs, long nrow, long ncol); @@ -970,12 +970,12 @@ void CalcWrappedRangeDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, long nrow, long ncol); void CalcWrappedAzDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, long kperpdpsi, long kpardpsi, long nrow, long ncol); -void CycleResidue(float **phase, signed char **residue, +void CycleResidue(float **phase, signed char **residue, int nrow, int ncol); void CalcFlow(float **phase, short ***flowsptr, long nrow, long ncol); void IntegratePhase(float **psi, float **phi, short **flows, long nrow, long ncol); -float **ExtractFlow(float **unwrappedphase, short ***flowsptr, +float **ExtractFlow(float **unwrappedphase, short ***flowsptr, long nrow, long ncol); void FlipPhaseArraySign(float **arr, paramT *params, long nrow, long ncol); void FlipFlowArraySign(short **arr, paramT *params, long nrow, long ncol); @@ -988,18 +988,18 @@ void *ReAlloc(void *ptr, size_t size); void Free2DArray(void **array, unsigned int nrow); void Set2DShortArray(short **arr, long nrow, long ncol, long value); signed char ValidDataArray(float **arr, long nrow, long ncol); -signed char IsFinite(double d); +int IsFinite(double d); long LRound(double a); long Short2DRowColAbsMax(short **arr, long nrow, long ncol); float LinInterp1D(float *arr, double index, long nelem); -float LinInterp2D(float **arr, double rowind, double colind , +float LinInterp2D(float **arr, double rowind, double colind , long nrow, long ncol); void Despeckle(float **mag, float ***ei, long nrow, long ncol); float **MirrorPad(float **array1, long nrow, long ncol, long krow, long kcol); -void BoxCarAvg(float **avgarr, float **padarr, long nrow, long ncol, +void BoxCarAvg(float **avgarr, float **padarr, long nrow, long ncol, long krow, long kcol); char *StrNCopy(char *dest, const char *src, size_t n); -void FlattenWrappedPhase(float **wrappedphase, float **unwrappedest, +void FlattenWrappedPhase(float **wrappedphase, float 
**unwrappedest, long nrow, long ncol); void Add2DFloatArrays(float **arr1, float **arr2, long nrow, long ncol); int StringToDouble(char *str, double *d); @@ -1017,62 +1017,62 @@ int LongCompare(const void *c1, const void *c2); void SetDefaults(infileT *infiles, outfileT *outfiles, paramT *params); void ProcessArgs(int argc, char *argv[], infileT *infiles, outfileT *outfiles, long *ncolptr, paramT *params); -void CheckParams(infileT *infiles, outfileT *outfiles, +void CheckParams(infileT *infiles, outfileT *outfiles, long linelen, long nlines, paramT *params); void ReadConfigFile(char *conffile, infileT *infiles, outfileT *outfiles, long *ncolptr, paramT *params); -void WriteConfigLogFile(int argc, char *argv[], infileT *infiles, +void WriteConfigLogFile(int argc, char *argv[], infileT *infiles, outfileT *outfiles, long linelen, paramT *params); void LogStringParam(FILE *fp, char *key, char *value); void LogBoolParam(FILE *fp, char *key, signed char boolvalue); void LogFileFormat(FILE *fp, char *key, signed char fileformat); long GetNLines(infileT *infiles, long linelen); -void WriteOutputFile(float **mag, float **unwrappedphase, char *outfile, +void WriteOutputFile(float **mag, float **unwrappedphase, char *outfile, outfileT *outfiles, long nrow, long ncol); FILE *OpenOutputFile(char *outfile, char *realoutfile); -void WriteAltLineFile(float **mag, float **phase, char *outfile, +void WriteAltLineFile(float **mag, float **phase, char *outfile, long nrow, long ncol); -void WriteAltSampFile(float **arr1, float **arr2, char *outfile, +void WriteAltSampFile(float **arr1, float **arr2, char *outfile, long nrow, long ncol); -void Write2DArray(void **array, char *filename, long nrow, long ncol, +void Write2DArray(void **array, char *filename, long nrow, long ncol, size_t size); -void Write2DRowColArray(void **array, char *filename, long nrow, +void Write2DRowColArray(void **array, char *filename, long nrow, long ncol, size_t size); void ReadInputFile(infileT *infiles, 
float ***magptr, float ***wrappedphaseptr, - short ***flowsptr, long linelen, long nlines, + short ***flowsptr, long linelen, long nlines, paramT *params, tileparamT *tileparams); -void ReadMagnitude(float **mag, infileT *infiles, long linelen, long nlines, +void ReadMagnitude(float **mag, infileT *infiles, long linelen, long nlines, tileparamT *tileparams); -void ReadUnwrappedEstimateFile(float ***unwrappedestptr, infileT *infiles, - long linelen, long nlines, +void ReadUnwrappedEstimateFile(float ***unwrappedestptr, infileT *infiles, + long linelen, long nlines, paramT *params, tileparamT *tileparams); -void ReadWeightsFile(short ***weightsptr,char *weightfile, +void ReadWeightsFile(short ***weightsptr,char *weightfile, long linelen, long nlines, tileparamT *tileparams); -void ReadIntensity(float ***pwrptr, float ***pwr1ptr, float ***pwr2ptr, - infileT *infiles, long linelen, long nlines, +void ReadIntensity(float ***pwrptr, float ***pwr1ptr, float ***pwr2ptr, + infileT *infiles, long linelen, long nlines, paramT *params, tileparamT *tileparams); void ReadCorrelation(float ***corrptr, infileT *infiles, long linelen, long nlines, tileparamT *tileparams); -void ReadAltLineFile(float ***mag, float ***phase, char *alfile, +void ReadAltLineFile(float ***mag, float ***phase, char *alfile, long linelen, long nlines, tileparamT *tileparams); -void ReadAltLineFilePhase(float ***phase, char *alfile, +void ReadAltLineFilePhase(float ***phase, char *alfile, long linelen, long nlines, tileparamT *tileparams); -void ReadComplexFile(float ***mag, float ***phase, char *rifile, +void ReadComplexFile(float ***mag, float ***phase, char *rifile, long linelen, long nlines, tileparamT *tileparams); -void Read2DArray(void ***arr, char *infile, long linelen, long nlines, +void Read2DArray(void ***arr, char *infile, long linelen, long nlines, tileparamT *tileparams, size_t elptrsize, size_t elsize); void ReadAltSampFile(float ***arr1, float ***arr2, char *infile, long linelen, long 
nlines, tileparamT *tileparams); -void Read2DRowColFile(void ***arr, char *filename, long linelen, long nlines, +void Read2DRowColFile(void ***arr, char *filename, long linelen, long nlines, tileparamT *tileparams, size_t size); -void Read2DRowColFileRows(void ***arr, char *filename, long linelen, +void Read2DRowColFileRows(void ***arr, char *filename, long linelen, long nlines, tileparamT *tileparams, size_t size); void SetDumpAll(outfileT *outfiles, paramT *params); void SetStreamPointers(void); void SetVerboseOut(paramT *params); void ChildResetStreamPointers(pid_t pid, long tilerow, long tilecol, paramT *params); -void DumpIncrCostFiles(incrcostT **incrcosts, long iincrcostfile, +void DumpIncrCostFiles(incrcostT **incrcosts, long iincrcostfile, long nflow, long nrow, long ncol); void MakeTileDir(paramT *params, outfileT *outfiles); void ParseFilename(char *filename, char *path, char *basename); @@ -1080,7 +1080,7 @@ void ParseFilename(char *filename, char *path, char *basename); /* functions in snaphu_cs2.c */ -void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, +void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, long cs2scalefactor, short ***flowsptr); diff --git a/contrib/Snaphu/src/snaphu_io.c b/contrib/Snaphu/src/snaphu_io.c index 8fa2bbd76..7739eaa3a 100644 --- a/contrib/Snaphu/src/snaphu_io.c +++ b/contrib/Snaphu/src/snaphu_io.c @@ -24,6 +24,7 @@ #include #include #include +#include #include "snaphu.h" diff --git a/contrib/Snaphu/src/snaphu_util.c b/contrib/Snaphu/src/snaphu_util.c index bb1422d79..08827f04d 100644 --- a/contrib/Snaphu/src/snaphu_util.c +++ b/contrib/Snaphu/src/snaphu_util.c @@ -23,13 +23,14 @@ #include #include #include +#include #include "snaphu.h" /* function: IsTrue() * ------------------ - * Returns TRUE if the string input is any of TRUE, True, true, 1, + * Returns TRUE if the string input is any of TRUE, True, true, 1, * y, Y, yes, YES */ int IsTrue(char *str){ @@ -46,7 +47,7 @@ 
int IsTrue(char *str){ /* function: IsFalse() * ------------------ - * Returns FALSE if the string input is any of FALSE, False, false, + * Returns FALSE if the string input is any of FALSE, False, false, * 0, n, N, no, NO */ int IsFalse(char *str){ @@ -82,10 +83,10 @@ signed char SetBooleanSignedChar(signed char *boolptr, char *str){ /* function: ModDiff() * ------------------- * Computes floating point difference between two numbers. - * f1 and f2 should be between [0,2pi). The result is the + * f1 and f2 should be between [0,2pi). The result is the * modulo difference between (-pi,pi]. Assumes that - * PI and TWOPI have been defined. - */ + * PI and TWOPI have been defined. + */ double ModDiff(double f1, double f2){ double f3; @@ -184,13 +185,13 @@ void CalcWrappedAzDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, /* function: CycleResidue() * ------------------------ - * Computes the cycle array of a phase 2D phase array. Input arrays - * should be type float ** and signed char ** with memory pre-allocated. - * Numbers of rows and columns in phase array should be passed. + * Computes the cycle array of a phase 2D phase array. Input arrays + * should be type float ** and signed char ** with memory pre-allocated. + * Numbers of rows and columns in phase array should be passed. * Residue array will then have size nrow-1 x ncol-1. Residues will * always be -1, 0, or 1 if wrapped phase is passed in. */ -void CycleResidue(float **phase, signed char **residue, +void CycleResidue(float **phase, signed char **residue, int nrow, int ncol){ int row, col; @@ -227,7 +228,7 @@ void CycleResidue(float **phase, signed char **residue, /* function: CalcFlow() * -------------------- - * Calculates flow based on unwrapped phase data in a 2D array. + * Calculates flow based on unwrapped phase data in a 2D array. * Allocates memory for row and column flow arrays. 
*/ void CalcFlow(float **phase, short ***flowsptr, long nrow, long ncol){ @@ -247,7 +248,7 @@ void CalcFlow(float **phase, short ***flowsptr, long nrow, long ncol){ /TWOPI); } } - + /* get col flows (horizontal phase differences) */ for(row=0;row=HUGE_VAL || tempdouble<=-HUGE_VAL){ @@ -952,15 +953,15 @@ int StringToDouble(char *str, double *d){ /* function: StringToLong() * ------------------------ - * Uses strtol to convert a string to a base-10 long, but also does error - * checking. If any part of the string is not converted, the function does + * Uses strtol to convert a string to a base-10 long, but also does error + * checking. If any part of the string is not converted, the function does * not make the assignment and returns TRUE. Otherwise, returns FALSE. */ int StringToLong(char *str, long *l){ long templong; char *endp; - + endp=str; templong=strtol(str,&endp,10); if(strlen(endp) || templong==LONG_MAX || templong==LONG_MIN){ @@ -974,7 +975,7 @@ int StringToLong(char *str, long *l){ /* function: CatchSignals() * ------------------------ - * Traps common signals that by default cause the program to abort. + * Traps common signals that by default cause the program to abort. * Sets (pointer to function) Handler as the signal handler for all. * Note that SIGKILL usually cannot be caught. No return value. */ @@ -997,10 +998,10 @@ void CatchSignals(void (*SigHandler)(int)){ /* function: SetDump() * ------------------- * Set the global variable dumpresults_global to TRUE if SIGINT or SIGHUP - * signals recieved. Also sets requestedstop_global if SIGINT signal - * received. This function should only be called via signal() when + * signals recieved. Also sets requestedstop_global if SIGINT signal + * received. This function should only be called via signal() when * a signal is caught. 
- */ + */ void SetDump(int signum){ if(signum==SIGINT){ @@ -1052,7 +1053,7 @@ void KillChildrenExit(int signum){ * Signal hanlder that prints message about the signal received, then exits. */ void SignalExit(int signum){ - + signal(SIGTERM,SIG_IGN); fprintf(sp0,"Exiting with status %d on signal %d\n",ABNORMAL_EXIT,signum); fflush(NULL); @@ -1067,7 +1068,7 @@ void SignalExit(int signum){ * DisplayElapsedTime(). */ void StartTimers(time_t *tstart, double *cputimestart){ - + struct rusage usagebuf; *tstart=time(NULL); @@ -1092,8 +1093,8 @@ void StartTimers(time_t *tstart, double *cputimestart){ * Displays the elapsed wall clock and CPU times for the process and its * children. Times should be initialized at the start of the program with * StartTimers(). The code is written to show the total processor time - * for the parent process and all of its children, but whether or not - * this is actually done depends on the implementation of the system time + * for the parent process and all of its children, but whether or not + * this is actually done depends on the implementation of the system time * functions. 
*/ void DisplayElapsedTime(time_t tstart, double cputimestart){ diff --git a/contrib/alos2filter/src/psfilt1.c b/contrib/alos2filter/src/psfilt1.c index 3051f1fa2..27c3f4bd2 100644 --- a/contrib/alos2filter/src/psfilt1.c +++ b/contrib/alos2filter/src/psfilt1.c @@ -24,7 +24,7 @@ #define REL_EOF 2 /* fseek relative to end of file */ #define SQR(a) ( (a)*(a) ) -#define PI 3.14159265359 +#define PI 3.14159265359 #define TWO_PI 6.28318530718 #define SQRT2 1.41421356237 /* square root of 2 */ #define RTD 57.2957795131 /* radians to degrees */ @@ -36,12 +36,12 @@ typedef struct{float re,im;} fcomplex; -void fourn(float *, unsigned int *, int ndim, int isign); +void fourn(float *, unsigned int *, int ndim, int isign); void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double, int, int, int); void start_timing(); /* timing routines */ void stop_timing(); - + unsigned int nfft[3]; int xmin=0; /* window column minima */ int ymin=0; /* window row minima */ @@ -50,7 +50,7 @@ int ymax, xmax; /* interferogram width, window maxima */ fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch); void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch); void nrerror(char error_text[]); -signed char IsFinite(double d); +int IsFinite(double d); int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw, int step) { @@ -62,7 +62,7 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw float *data; /* pointer to floats for FFT, union with seg_fft */ //double alpha; /* exponent used to to determine spectal weighting function */ double rw,azw; /* range and azimuth weights used in window function */ - + int nlines=0; /* number of lines in the file */ int offs; /* width and height of file segment*/ //int fftw; /* fft window size*/ @@ -70,10 +70,10 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw int xw,yh; /* width, height of processed region */ int i,j,i1,j1; /* loop counters */ int ndim; /* number of 
dimensions of FFT */ - int isign; /* direction of FFT */ + int isign; /* direction of FFT */ int nlc; /* number of guides, number of low correlation pixels */ int lc; /* line counter */ - + FILE *int_file, *sm_file; double psd,psd_sc; /* power spectrum, scale factor */ @@ -88,41 +88,41 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw float sf0; // an extra factor to scale the data sf0 = 1.0; - + fprintf(stderr,"*** Weighted power spectrum interferogram filter v1.0 clw 19-Feb-97 ***\n"); if(0){ //fprintf(stderr,"\nUsage: %s [alpha] [fftw] [step] [xmin] [xmax] [ymin] [ymax]\n\n",argv[0]) ; - + fprintf(stderr,"input parameters: \n"); fprintf(stderr," interferogram complex interferogram image filename\n"); fprintf(stderr," smoothed interf. smoothed interferogram filename\n"); fprintf(stderr," width number of samples/row\n"); - fprintf(stderr," alpha spectrum amplitude scale factor (default=.5)\n"); + fprintf(stderr," alpha spectrum amplitude scale factor (default=.5)\n"); fprintf(stderr," fftw fft window size in both range and azimuth directions \n"); fprintf(stderr," step moving step size in both range and azimuth directions (default = fftw/2)\n"); - fprintf(stderr," xmin offset to starting range pixel (default = 0)\n"); - fprintf(stderr," xmax offset last range pixel (default = width-1)\n"); - fprintf(stderr," ymin offset to starting azimuth row (default = 0)\n"); - fprintf(stderr," ymax offset to last azimuth row (default = nlines-1)\n\n"); + fprintf(stderr," xmin offset to starting range pixel (default = 0)\n"); + fprintf(stderr," xmax offset last range pixel (default = width-1)\n"); + fprintf(stderr," ymin offset to starting azimuth row (default = 0)\n"); + fprintf(stderr," ymax offset to last azimuth row (default = nlines-1)\n\n"); exit(-1); } start_timing(); - int_file = fopen(inputfile,"rb"); + int_file = fopen(inputfile,"rb"); if (int_file == NULL){fprintf(stderr,"cannot open interferogram file: %s\n",inputfile); exit(-1);} - 
sm_file = fopen(outputfile,"wb"); + sm_file = fopen(outputfile,"wb"); if (sm_file == NULL){fprintf(stderr,"cannot create smoothed interferogram file: %s\n",outputfile); exit(-1);} - //sscanf(argv[3],"%d",&width); - xmax = width-1; - + //sscanf(argv[3],"%d",&width); + xmax = width-1; + fseeko(int_file, 0L, REL_EOF); /* determine # lines in the file */ nlines=(int)(ftello(int_file)/(width*2*sizeof(float))); - fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); + fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); rewind(int_file); - + //alpha = ALPHA; //if(argc >= 4)sscanf(argv[4],"%lf",&alpha); fprintf(stdout,"spectrum weighting exponent: %8.4f\n",alpha); @@ -139,26 +139,26 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw fprintf(stdout,"WARNING: wrong step size: %5d, using %5d instead\n",step, fftw/2); step = fftw/2; } - fprintf(stdout,"range and azimuth step size (pixels): %5d\n",step); - + fprintf(stdout,"range and azimuth step size (pixels): %5d\n",step); + ymax=nlines-1; /* default value of ymax */ //if(argc > 7)sscanf(argv[7],"%d",&xmin); /* window to process */ //if(argc > 8)sscanf(argv[8],"%d",&xmax); //if(argc > 9)sscanf(argv[9],"%d",&ymin); //if(argc > 10)sscanf(argv[10],"%d",&ymax); - + if (ymax > nlines-1){ - ymax = nlines-1; - fprintf(stderr,"WARNING: insufficient #lines in the file for given input range: ymax: %d\n",ymax); + ymax = nlines-1; + fprintf(stderr,"WARNING: insufficient #lines in the file for given input range: ymax: %d\n",ymax); } if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ xw=xmax-xmin+1; /* width of array */ - yh=ymax-ymin+1; /* height of array */ + yh=ymax-ymin+1; /* height of array */ offs=ymin; /* first line of file to start reading/writing */ fprintf(stdout,"array width, height, offset: %5d %5d %5d\n",xw,yh,offs); - - cmp = cmatrix(0, fftw-1, -fftw,width+fftw); /* add space around the arrays */ + + cmp = cmatrix(0, fftw-1, -fftw,width+fftw); /* 
add space around the arrays */ sm = cmatrix(0,fftw-1,-fftw,width+fftw); tmp = (fcomplex **)malloc(sizeof(fcomplex *)*step); @@ -186,28 +186,28 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw for(i=0; i < fftw; i++){ /* initialize circular data buffers */ for(j= -fftw; j < width+fftw; j++){ cmp[i][j].re = 0.0; cmp[i][j].im = 0.0; - sm[i][j].re = 0.0; sm[i][j].im = 0.0; + sm[i][j].re = 0.0; sm[i][j].im = 0.0; } } - + for (i=0; i < fftw; i++){ for (j=0; j < fftw; j++){ - azw = 1.0 - fabs(2.0*(double)(i-fftw/2)/(fftw+1)); - rw = 1.0 - fabs(2.0*(double)(j-fftw/2)/(fftw+1)); + azw = 1.0 - fabs(2.0*(double)(i-fftw/2)/(fftw+1)); + rw = 1.0 - fabs(2.0*(double)(j-fftw/2)/(fftw+1)); wf[i][j]=azw*rw/(double)(fftw*fftw); #ifdef DEBUG fprintf(stderr,"i,j,wf: %5d %5d %12.4e\n",i,j,wf[i][j]); #endif } } - + nfft[1] = fftw; nfft[2] = nfft[1]; nfft[0] = 0; ndim = 2; isign = 1; /* initialize FFT parameter values, inverse FFT */ - + fseek(int_file, offs*width*sizeof(fcomplex), REL_BEGIN); /* seek offset to start line of interferogram */ for (i=0; i < fftw - step; i++){ fread((char *)cmp[i], sizeof(fcomplex), width, int_file); @@ -218,8 +218,8 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw } lc=0; - p_forward = fftwf_plan_dft_2d(fftw, fftw, (fftw_complex *)seg_fft[0], (fftw_complex *)seg_fft[0], FFTW_FORWARD, FFTW_MEASURE); - p_backward = fftwf_plan_dft_2d(fftw, fftw, (fftw_complex *)seg_fft[0], (fftw_complex *)seg_fft[0], FFTW_BACKWARD, FFTW_MEASURE); + p_forward = fftwf_plan_dft_2d(fftw, fftw, (fftwf_complex *)seg_fft[0], (fftwf_complex *)seg_fft[0], FFTW_FORWARD, FFTW_MEASURE); + p_backward = fftwf_plan_dft_2d(fftw, fftw, (fftwf_complex *)seg_fft[0], (fftwf_complex *)seg_fft[0], FFTW_BACKWARD, FFTW_MEASURE); for (i=0; i < yh; i += step){ for(i1=fftw - step; i1 < fftw; i1++){ @@ -233,12 +233,12 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw } for(j1= -fftw; j1 < width+fftw; 
j1++){ sm[i1][j1].re=0.0; sm[i1][j1].im=0.0; /* clear out area for new sum */ - } + } } if(i%(2*step) == 0)fprintf(stderr,"\rprogress: %3d%%", (int)(i*100/yh + 0.5)); - + for (j=0; j < width; j += step){ - //psd_wgt(cmp, seg_fft, alpha, j, i, fftw); + //psd_wgt(cmp, seg_fft, alpha, j, i, fftw); //////////////////////////////////////////////////////////////////////////////////////// //replace function psd_wgt with the following to call FFTW, crl, 23-APR-2020 for (ii=0; ii < fftw; ii++){ /* load up data array */ @@ -256,7 +256,7 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw psd = seg_fft[ii][jj].re * seg_fft[ii][jj].re + seg_fft[ii][jj].im * seg_fft[ii][jj].im; psd_sc = pow(psd,alpha/2.); seg_fft[ii][jj].re *= psd_sc; - seg_fft[ii][jj].im *= psd_sc; + seg_fft[ii][jj].im *= psd_sc; } } ///////////////////////////////////////////////////////////////////////////////////////// @@ -266,17 +266,17 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw for (i1=0; i1 < fftw; i1++){ /* save filtered output values */ for (j1=0; j1 < fftw; j1++){ if(cmp[i1][j+j1].re !=0.0){ - sm[i1][j+j1].re += wf[i1][j1]*seg_fft[i1][j1].re; + sm[i1][j+j1].re += wf[i1][j1]*seg_fft[i1][j1].re; sm[i1][j+j1].im += wf[i1][j1]*seg_fft[i1][j1].im; } else{ - sm[i1][j+j1].re=0.0; + sm[i1][j+j1].re=0.0; sm[i1][j+j1].im=0.0; } } } - } - for (i1=0; i1 < step; i1++){ + } + for (i1=0; i1 < step; i1++){ if (lc < yh){ for(k = 0; k < width; k++){ if(!IsFinite(sm[i1][k].re)) @@ -288,7 +288,7 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw sm[i1][k].im = 0.0; } } - fwrite((char *)sm[i1], sizeof(fcomplex), width, sm_file); + fwrite((char *)sm[i1], sizeof(fcomplex), width, sm_file); } lc++; } @@ -297,13 +297,13 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw for (i1=0; i1 < step; i1++){cmp[fftw - step+i1] = tmp[i1]; sm[fftw - step+i1]=tmp1[i1];} /* copy pointers back */ } 
fprintf(stderr,"\rprogress: %3d%%", 100); - - for(i=lc; i < yh; i++){ /* write null lines of filtered complex data */ + + for(i=lc; i < yh; i++){ /* write null lines of filtered complex data */ for(k = 0; k < width; k++){ if(!IsFinite(bufcz[k].re)) bufcz[k].re = 0.0; if(!IsFinite(bufcz[k].im)) - bufcz[k].im = 0.0; + bufcz[k].im = 0.0; if(!IsFinite(sqrt(bufcz[k].re*bufcz[k].re + bufcz[k].im*bufcz[k].im))){ bufcz[k].re = 0.0; bufcz[k].im = 0.0; @@ -311,7 +311,7 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw } fwrite((char *)bufcz, sizeof(fcomplex), width, sm_file); lc++; - } + } fprintf(stdout,"\nnumber of lines written to file: %d\n",lc); stop_timing(); @@ -328,14 +328,14 @@ int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw free(tmp1); free(wf); free(wfb); - fclose(int_file); + fclose(int_file); fclose(sm_file); - return(0); -} - + return(0); +} + void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double alpha, int ix, int iy, int fftw) -/* +/* subroutine to perform non-linear spectral filtering 17-Feb-97 clw */ @@ -343,14 +343,14 @@ void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double alpha, int ix, int iy, i double psd,psd_sc; /* power spectrum, scale factor */ int i,j; /* loop counters */ int ndim,isign; /* number of dimensions in fft */ - + int ic; - + unsigned int nfft[3]; ic = 0; - + ndim=2, isign = -1, nfft[1]=fftw, nfft[2]=fftw, nfft[0]=0; /* fft initialization */ - + for (i=0; i < fftw; i++){ /* load up data array */ for (j=ix; j < ix+fftw; j++){ seg_fft[i][j-ix].re = cmp[i][j].re; @@ -365,7 +365,7 @@ void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double alpha, int ix, int iy, i psd = seg_fft[i][j].re * seg_fft[i][j].re + seg_fft[i][j].im * seg_fft[i][j].im; psd_sc = pow(psd,alpha/2.); seg_fft[i][j].re *= psd_sc; - seg_fft[i][j].im *= psd_sc; + seg_fft[i][j].im *= psd_sc; } } } @@ -509,20 +509,20 @@ void stop_timing() elapsed_time = (end_time - start_time); fprintf(stdout,"\n\nuser time 
(s): %10.3f\n", (double)user_time/clk_tck); - fprintf(stdout,"system time (s): %10.3f\n", (double)system_time/clk_tck); + fprintf(stdout,"system time (s): %10.3f\n", (double)system_time/clk_tck); fprintf(stdout,"elapsed time (s): %10.3f\n\n", (double) elapsed_time/clk_tck); } /* function: IsFinite() * -------------------- - * This function takes a double and returns a nonzero value if + * This function takes a double and returns a nonzero value if * the arguemnt is finite (not NaN and not infinite), and zero otherwise. * Different implementations are given here since not all machines have * these functions available. */ -signed char IsFinite(double d){ +int IsFinite(double d){ - return(finite(d)); + return(isfinite(d)); /* return(isfinite(d)); */ /* return(!(isnan(d) || isinf(d))); */ /* return(TRUE) */ diff --git a/contrib/mdx/src/graphx_mdx.c b/contrib/mdx/src/graphx_mdx.c index e76132184..e439fcbf7 100755 --- a/contrib/mdx/src/graphx_mdx.c +++ b/contrib/mdx/src/graphx_mdx.c @@ -677,7 +677,7 @@ char a_data[3360]; if (i_db > 8-1) printf("a_lll = %d %s %d %d %d\n",a_lll,a_lll,*(a_lll+0),*(a_lll+1),*(a_lll+2)); XtAddCallback(textwin, XmNactivateCallback, - XmProcessTraversal, (XtPointer)XmTRAVERSE_NEXT_TAB_GROUP); + (XtCallbackProc)XmProcessTraversal, (XtPointer)XmTRAVERSE_NEXT_TAB_GROUP); } diff --git a/contrib/stack/README.md b/contrib/stack/README.md index 56750bb2f..99331cffc 100644 --- a/contrib/stack/README.md +++ b/contrib/stack/README.md @@ -1,4 +1,4 @@ -## ISCE-2 Stack Processors +## ISCE2 Stack Processors Read the document for each stack processor for details. 
diff --git a/contrib/stack/alosStack/StackPulic.py b/contrib/stack/alosStack/StackPulic.py index ba919973b..a19257eaa 100644 --- a/contrib/stack/alosStack/StackPulic.py +++ b/contrib/stack/alosStack/StackPulic.py @@ -187,7 +187,7 @@ def stackDateStatistics(idir, dateReference): #get date folders dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) - dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dateDirs = [x for x in dateDirs if os.path.isdir(x) and os.path.basename(x).isdigit()] #find index of reference date: dates = [] diff --git a/contrib/stack/alosStack/create_cmds.py b/contrib/stack/alosStack/create_cmds.py index f4dea2f36..e6b931438 100755 --- a/contrib/stack/alosStack/create_cmds.py +++ b/contrib/stack/alosStack/create_cmds.py @@ -88,9 +88,9 @@ def formPairs(idir, numberOfSubsequentDates, pairTimeSpanMinimum=None, pairTimeS ''' datefmt = "%y%m%d" - #get date folders + #get date folders [and ignore sub-folders that are not digits, e.g. ARCHIVED_FILES] dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) - dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dateDirs = [x for x in dateDirs if os.path.isdir(x) and os.path.basename(x).isdigit()] dates = [os.path.basename(x) for x in dateDirs] ndate = len(dates) @@ -224,7 +224,7 @@ def checkStackDataDir(idir): #get date folders dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) - dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dateDirs = [x for x in dateDirs if os.path.isdir(x) and os.path.basename(x).isdigit()] #check dates and acquisition mode mode = os.path.basename(sorted(glob.glob(os.path.join(dateDirs[0], 'IMG-HH-ALOS2*')))[0]).split('-')[4][0:3] @@ -1258,7 +1258,7 @@ def parallelSettings(array): extraArguments = '' if insar.geocodeInterpMethod is not None: extraArguments += ' -interp_method {}'.format(insar.geocodeInterpMethod) - if insar.bbox is not None: + if insar.bbox: # is not None: extraArguments += ' -bbox 
{}'.format('/'.format(insar.bbox)) cmd += header('geocode') @@ -1371,8 +1371,8 @@ def cmdLineParse(): stack.datesExcluded, stack.pairsExcluded) datesProcess = datesFromPairs(pairsProcess) print('InSAR processing:') - print('dates: {}'.format(' '.join(datesProcess))) - print('pairs: {}'.format(' '.join(pairsProcess))) + print(f"dates ({len(datesProcess)}): {' '.join(datesProcess)}") + print(f"pairs ({len(pairsProcess)}): {' '.join(pairsProcess)}") rank = stackRank(datesProcess, pairsProcess) if rank != len(datesProcess) - 1: @@ -1388,8 +1388,8 @@ def cmdLineParse(): stack.datesExcludedIon, stack.pairsExcludedIon) datesProcessIon = datesFromPairs(pairsProcessIon) print('ionospheric phase estimation:') - print('dates: {}'.format(' '.join(datesProcessIon))) - print('pairs: {}'.format(' '.join(pairsProcessIon))) + print(f"dates ({len(datesProcessIon)}): {' '.join(datesProcessIon)}") + print(f"pairs ({len(pairsProcessIon)}): {' '.join(pairsProcessIon)}") rankIon = stackRank(datesProcessIon, pairsProcessIon) if rankIon != len(datesProcessIon) - 1: @@ -1418,7 +1418,7 @@ def cmdLineParse(): datesProcess, datesProcessRemoved = removeCommonItemsLists(datesProcess, datesProcessedAlready) if datesProcessRemoved != []: print('the following dates have already been processed, will not reprocess them.') - print('dates: {}'.format(' '.join(datesProcessRemoved))) + print(f"dates ({len(datesProcessRemoved)}): {' '.join(datesProcessRemoved)}") print() pairsProcessedAlready = getFolders(stack.pairsProcessingDir) @@ -1426,7 +1426,7 @@ def cmdLineParse(): pairsProcess, pairsProcessRemoved = removeCommonItemsLists(pairsProcess, pairsProcessedAlready) if pairsProcessRemoved != []: print('the following pairs for InSAR processing have already been processed, will not reprocess them.') - print('pairs: {}'.format(' '.join(pairsProcessRemoved))) + print(f"pairs ({len(pairsProcessRemoved)}): {' '.join(pairsProcessRemoved)}") print() if insar.doIon: @@ -1435,16 +1435,16 @@ def cmdLineParse(): 
pairsProcessIon, pairsProcessRemovedIon = removeCommonItemsLists(pairsProcessIon, pairsProcessedAlreadyIon) if pairsProcessRemovedIon != []: print('the following pairs for estimating ionospheric phase have already been processed, will not reprocess them.') - print('pairs: {}'.format(' '.join(pairsProcessRemovedIon))) + print(f"pairs ({len(pairsProcessRemovedIon)}): {' '.join(pairsProcessRemovedIon)}") print() print() - + print('dates and pairs to be processed:') - print('dates: {}'.format(' '.join(datesProcess))) - print('pairs (for InSAR processing): {}'.format(' '.join(pairsProcess))) + print(f"dates ({len(datesProcess)}): {' '.join(datesProcess)}") + print(f"pairs ({len(pairsProcess)}; for InSAR processing): {' '.join(pairsProcess)}") if insar.doIon: - print('pairs (for estimating ionospheric phase): {}'.format(' '.join(pairsProcessIon))) + print(f"pairs ({len(pairsProcessIon)}; for estimating ionospheric phase): {' '.join(pairsProcessIon)}") print('\n') diff --git a/contrib/stack/alosStack/ion_check.py b/contrib/stack/alosStack/ion_check.py index a341d7362..ec00c3b35 100755 --- a/contrib/stack/alosStack/ion_check.py +++ b/contrib/stack/alosStack/ion_check.py @@ -103,7 +103,7 @@ def cmdLineParse(): runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out2.ppm'))) runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(diff, width, wbdArguments, odir)) runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out3.ppm'))) - runCmd("montage -pointsize {} -label 'original' {} -label 'ionosphere' {} -label 'corrected' {} -geometry +{} -compress LZW{} {}.tif".format( + runCmd("montage -font DejaVu-Sans -pointsize {} -label 'original' {} -label 'ionosphere' {} -label 'corrected' {} -geometry +{} -compress LZW{} {}.tif".format( int((ratio*width)/111*18+0.5), os.path.join(odir, 'out1.ppm'), os.path.join(odir, 'out2.ppm'), diff --git a/contrib/stack/alosStack/read_data.py 
b/contrib/stack/alosStack/read_data.py index 44c2bc7a7..26ed5224e 100755 --- a/contrib/stack/alosStack/read_data.py +++ b/contrib/stack/alosStack/read_data.py @@ -34,7 +34,7 @@ def sorter(item): #get only folders in dataDir dateDirs = sorted(glob.glob(os.path.join(dataDir, '*'))) - dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dateDirs = [x for x in dateDirs if os.path.isdir(x) and os.path.basename(x).isdigit()] ndate = len(dateDirs) #get first LED files in dateDirs diff --git a/contrib/stack/stripmapStack/prepSlcALOS2.py b/contrib/stack/stripmapStack/prepSlcALOS2.py index 1d2c6a187..265419dd0 100755 --- a/contrib/stack/stripmapStack/prepSlcALOS2.py +++ b/contrib/stack/stripmapStack/prepSlcALOS2.py @@ -12,13 +12,20 @@ from uncompressFile import uncompressfile +EXAMPLE = """example: + prepSlcALOS2.py -i download -o SLC + ${ISCE_STACK}/stripmapStack/prepSlcALOS2.py -i download --alosStack +""" + + def createParser(): ''' Create command line parser. ''' parser = argparse.ArgumentParser(description='Prepare ALOS2 SLC for processing (unzip/untar files, ' - 'organize in date folders, generate script to unpack into isce formats).') + 'organize in date folders, generate script to unpack into isce formats).', + epilog=EXAMPLE, formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-i', '--input', dest='inputDir', type=str, required=True, help='directory with the downloaded SLC data') parser.add_argument('-o', '--output', dest='outputDir', type=str, required=False, @@ -33,6 +40,9 @@ def createParser(): parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, help='Optional: remove zip/tar/compressed files after unpacking into date structure ' '(default is to keep in archive folder)') + + parser.add_argument('--alosStack', dest='alosStack', action='store_true', default=False, + help='Optional: organize the uncompressed folders into the alosStack conventions.') return parser @@ -178,24 +188,43 @@ def main(iargs=None): 
workdir = os.path.dirname(ALOS_folder) if successflag: - # move the file into the date folder - SLC_dir = os.path.join(workdir,imgDate,'') - os.makedirs(SLC_dir, exist_ok=True) - - # check if the folder already exist in that case overwrite it - ALOS_folder_out = os.path.join(SLC_dir,os.path.basename(ALOS_folder)) - if os.path.isdir(ALOS_folder_out): - shutil.rmtree(ALOS_folder_out) - # move the ALOS acqusition folder in the date folder - cmd = 'mv ' + ALOS_folder + ' ' + SLC_dir + '.' - os.system(cmd) + if inps.alosStack: + ## alosStack: YYMMDD/IMG-*.1__A + # create the date folder + imgDate = imgDate[2:] + SLC_dir = os.path.join(workdir,imgDate) + os.makedirs(SLC_dir, exist_ok=True) + + # move the ALOS file into the date folder + cmd = f'mv {ALOS_folder}/* {SLC_dir}' + os.system(cmd) + + # remove the ALOS acquisition folder + shutil.rmtree(ALOS_folder) + + else: + ## stripmapStack: YYYYMMDD/ALOS2*/IMG-*.1__A + # create the date folder + SLC_dir = os.path.join(workdir,imgDate) + os.makedirs(SLC_dir, exist_ok=True) + + # check if the folder already exist in that case overwrite it + ALOS_folder_out = os.path.join(SLC_dir,os.path.basename(ALOS_folder)) + if os.path.isdir(ALOS_folder_out): + shutil.rmtree(ALOS_folder_out) + + # move the ALOS acqusition folder in the date folder + cmd = 'mv ' + ALOS_folder + ' ' + SLC_dir + '.' 
+ os.system(cmd) print ('Succes: ' + imgDate) else: print('Failed: ' + ALOS_folder) - + # now generate the unpacking script for all the date dirs + if inps.alosStack: + return dateDirs = sorted(glob.glob(os.path.join(inps.inputDir,'2*'))) if inps.outputDir is not None: f = open(run_unPack,'w') diff --git a/contrib/stack/stripmapStack/unpackFrame_ERS_ENV.py b/contrib/stack/stripmapStack/unpackFrame_ERS_ENV.py new file mode 100755 index 000000000..86ad80cc3 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ERS_ENV.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import isceobj +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +import datetime +import numpy as np + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack ERS(ESA) SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='datadir', type=str, + required=True, help='Input ERS files path') + parser.add_argument('-b', '--box' ,dest='bbox', type=float, nargs=4, default=None, + help='Bbox (SNWE in degrees)') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + parser.add_argument('--orbitdir', dest='orbitdir', type=str, required=True, help='Orbit directory') + parser.add_argument('--orbittype',dest='orbittype', type = str, default='ODR', help='ODR, PDS, PDS') + return parser.parse_args() + +def get_Date(file): + yyyymmdd=file[14:22] + return yyyymmdd + +def write_xml(shelveFile, slcFile): + with shelve.open(shelveFile,flag='r') as db: + frame = db['frame'] + + length = frame.numberOfLines + width = frame.numberOfSamples + print (width,length) + + slc = isceobj.createSlcImage() + slc.setWidth(width) + slc.setLength(length) + slc.filename = slcFile + slc.setAccessMode('write') + slc.renderHdr() + slc.renderVRT() 
+ +def unpack(fname, slcname, orbitdir, orbittype): + ''' + Unpack .E* file to binary SLC file. + ''' + + obj = createSensor('ERS_ENviSAT_SLC') + obj._imageFileName = fname + obj._orbitDir = orbitdir + obj._orbitType = orbittype + #obj.instrumentDir = '/Users/agram/orbit/INS_DIR' + obj.output = os.path.join(slcname,os.path.basename(slcname)+'.slc') + obj.extractImage() + obj.frame.getImage().renderHdr() + obj.extractDoppler() + + + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + # db['doppler'] = poly + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + if not os.path.isdir(inps.slcdir): + os.mkdir(inps.slcdir) + for fname in glob.glob(os.path.join(inps.datadir, '*.E*')): + date = get_Date(os.path.basename(fname)) + slcname = os.path.abspath(os.path.join(inps.slcdir, date)) + os.makedirs(slcname, exist_ok=True) + + print(fname) + unpack(fname, slcname, inps.orbitdir, inps.orbittype) + + slcFile = os.path.abspath(os.path.join(slcname, date+'.slc')) + + shelveFile = os.path.join(slcname, 'data') + write_xml(shelveFile,slcFile) + + if inps.bbox is not None: + slccropname = os.path.abspath(os.path.join(inps.slcdir+'_crop',date)) + os.makedirs(slccropname, exist_ok=True) + cmd = 'cropFrame.py -i {} -o {} -b {}'.format(slcname, slccropname, ' '.join([str(x) for x in inps.bbox])) + print(cmd) + os.system(cmd) + diff --git a/contrib/stack/topsStack/README.md b/contrib/stack/topsStack/README.md index 54b6ac7f6..25323374c 100644 --- a/contrib/stack/topsStack/README.md +++ b/contrib/stack/topsStack/README.md @@ -186,7 +186,7 @@ Ionospheric phase estimation has more requirements than regular InSAR processing In stack ionospheric phase estimation, acquistions with same swath starting ranges are put in a group. A network is formed within a group. 
Extra pairs are also processed to connect the different groups so that all acquistions are connected. But we need to estimate a phase offset for these extra pairs, which might not be accurate. Therefore, for a particualr swath starting ranges, if there are only a few acquistions, it's better to just discard them so that we don't have to estimate the phase offsets. ``` -s1_select_ion.py -dir data/slc -sn 33.550217/37.119545 -nr 10 +s1_select_ion.py -dir data/slc -sn 33.550217 37.119545 -nr 10 ``` Acquistions to be used need to fully cover the south/north bounds. After running this command, acquistion not to be used will be put in a folder named 'not_used'. It's OK to run this command multiple times. @@ -204,6 +204,8 @@ An example --param_ion file 'ion_param.txt' is provided in the code directory. F stackSentinel.py -s ../data/slc -d ../data/dem/dem_1_arcsec/demLat_N32_N41_Lon_W113_W107.dem.wgs84 -b '33.550217 37.119545 -111.233932 -107.790451' -a ../data/s1_aux_cal -o ../data/orbit -C geometry -c 2 --param_ion ../code/ion_param.txt --num_connections_ion 3 ``` +Note in 'ion_param.txt', if 'consider burst properties in ionosphere computation' is True, Coregistration options '-C', '--coregistration' in stackSentinel.py must be NESD. + If ionospheric phase estimation is enabled in stackSentinel.py, it will generate the following run files. Here ***ns*** means number of steps in the original stack processing, which depends on the type of stack (slc, correlation, interferogram, and offset). - run_ns+1_subband_and_resamp @@ -251,6 +253,22 @@ Estimate ionospheric phase for each date. We highly recommend inspecting all pai Typical anomalies include dense fringes caused by phase unwrapping errors, and a range ramp as a result of errors in estimating phase offsets for pairs with different swath starting ranges (check pairs_diff_starting_ranges.txt). +**run_ns+9_filtIonShift** + +Filter azimuth ionospheric shift. 
+ +**run_ns+10_invertIonShift** + +Estimate azimuth ionospheric shift for each date. As in step **run_ns+8_invertIon**, check if there are anamolies. + +**run_ns+11_burstRampIon** + +Compute azimuth burst ramps as a result of ionosphere for each date. + +**run_ns+12_mergeBurstRampIon** + +Merge azimuth burst ramps. + #### 3. run command files generated #### Run the commands sequentially. @@ -259,20 +277,25 @@ Run the commands sequentially. Results from ionospheric phase estimation. -- reference and coreg_secondarys: now contains also subband burst SLCs -- ion: original ionospheric phase estimation results -- ion_dates: ionospheric phase for each acquistion -- ion/date1_date2/ion_cal/filt.ion: filtered ionospheric phase -- ion/date1_date2/ion_cal/raw_no_projection.ion: original ionospheric phase -- ion/date1_date2/lower/merged/fine_look.unw: unwrapped lower band interferogram -- ion/date1_date2/upper/merged/fine_look.unw: unwrapped upper band interferogram +- reference and coreg_secondarys: now contains also subband burst SLCs +- ion: original ionospheric phase estimation results +- ion_azshift_dates: azimuth ionospheric shift for each acquistion +- ion_burst_ramp_dates: azimuth burst ramps caused by ionosphere for each acquistion +- ion_burst_ramp_merged_dates: merged azimuth burst ramps caused by ionosphere for each acquistion +- ion_dates: ionospheric phase for each acquistion +- ion/date1_date2/ion_cal/azshift.ion: azimuth ionospheric shift +- ion/date1_date2/ion_cal/filt.ion: filtered ionospheric phase +- ion/date1_date2/ion_cal/raw_no_projection.ion: original ionospheric phase +- ion/date1_date2/lower/merged/fine_look.unw: unwrapped lower band interferogram +- ion/date1_date2/upper/merged/fine_look.unw: unwrapped upper band interferogram If ionospheric phase estimation processing is swath by swath because of different swath starting ranges, there will be swath processing directories including -- ion/date1_date2/ion_cal_IW* -- ion/date1_date2/lower/merged_IW* 
-- ion/date1_date2/upper/merged_IW* +- ion/date1_date2/ion_cal_IW* +- ion/date1_date2/lower/merged_IW* +- ion/date1_date2/upper/merged_IW* +Unit of azimuth ionospheric shift is number of single look azimuth lines. After processing, we can plot ionospheric phase estimation results using plotIonPairs.py and plotIonDates.py. For example ``` @@ -285,4 +308,5 @@ Relationships of the ionospheric phases: ``` ion_dates/date1.ion - ion_dates/date2.ion = ion/date1_date2/ion_cal/filt.ion ion_dates/date1.ion - ion_dates/date2.ion = ionospheric phase in merged/interferograms/date1_date2/filt_fine.unw +ion_burst_ramp_merged_dates/date1.float - ion_burst_ramp_merged_dates/date2.float = azimuth burst ramps caused by ionosphere in merged/interferograms/date1_date2/filt_fine.unw ``` diff --git a/contrib/stack/topsStack/Stack.py b/contrib/stack/topsStack/Stack.py index 4fd860f50..580ac6c77 100644 --- a/contrib/stack/topsStack/Stack.py +++ b/contrib/stack/topsStack/Stack.py @@ -339,8 +339,33 @@ def filtIon(self, function): self.f.write('win_min : ' + '{}'.format(self.win_min) + '\n') self.f.write('win_max : ' + '{}'.format(self.win_max) + '\n') + def filtIonShift(self, function): + self.f.write('###################################'+'\n') + self.f.write(function + '\n') + self.f.write('filtIonShift : ' + '\n') + self.f.write('reference_stack : ' + self.reference_stack + '\n') + self.f.write('reference : ' + self.reference + '\n') + self.f.write('secondary : ' + self.secondary + '\n') + self.f.write('input : ' + self.input + '\n') + self.f.write('coherence : ' + self.coherence + '\n') + self.f.write('output : ' + self.output + '\n') + self.f.write('win_min : ' + '{}'.format(self.win_min) + '\n') + self.f.write('win_max : ' + '{}'.format(self.win_max) + '\n') + self.f.write('nrlks : ' + '{}'.format(self.nrlks) + '\n') + self.f.write('nalks : ' + '{}'.format(self.nalks) + '\n') + def burstRampIon(self, function): + self.f.write('###################################'+'\n') + 
self.f.write(function + '\n') + self.f.write('burstRampIon : ' + '\n') + self.f.write('reference_stack : ' + self.reference_stack + '\n') + self.f.write('input : ' + self.input + '\n') + self.f.write('output : ' + self.output + '\n') + self.f.write('nrlks : ' + '{}'.format(self.nrlks) + '\n') + self.f.write('nalks : ' + '{}'.format(self.nalks) + '\n') + self.f.write('ion_height : ' + '{}'.format(self.ion_height) + '\n') + def write_wrapper_config2run_file(self, configName, line_cnt, numProcess = 1): # dispassionate list of commands for single process @@ -1330,7 +1355,7 @@ def unwrap_ion(self, pairs_same_starting_ranges_update, pairs_diff_starting_rang configObj.defoMax = '2' configObj.rangeLooks = '{}'.format(ionParamUsrObj.ION_numberRangeLooks0) configObj.azimuthLooks = '{}'.format(ionParamUsrObj.ION_numberAzimuthLooks0) - configObj.rmfilter = False + configObj.rmFilter = False configObj.unwMethod = 'snaphu' configObj.unwrap(function) @@ -1348,7 +1373,7 @@ def unwrap_ion(self, pairs_same_starting_ranges_update, pairs_diff_starting_rang configObj.defoMax = '2' configObj.rangeLooks = '{}'.format(ionParamUsrObj.ION_numberRangeLooks0) configObj.azimuthLooks = '{}'.format(ionParamUsrObj.ION_numberAzimuthLooks0) - configObj.rmfilter = False + configObj.rmFilter = False configObj.unwMethod = 'snaphu' configObj.unwrap(function) @@ -1542,6 +1567,105 @@ def invertIon(self): self.runf.write(self.text_cmd + cmd + '\n') + def filtIonShift(self, pairs): + + ionParamUsrObj = ionParamUsr(self.param_ion) + ionParamUsrObj.configure() + + line_cnt = 0 + for p in pairs: + configName = os.path.join(self.config_path,'config_filtIonShift_{}_{}'.format(p[0], p[1])) + configObj = config(configName) + configObj.configure(self) + configObj.reference_stack = os.path.join(self.work_dir, 'reference') + configObj.reference = os.path.join(self.work_dir, 'coreg_secondarys', '{}'.format(p[0])) + configObj.secondary = os.path.join(self.work_dir, 'coreg_secondarys', '{}'.format(p[1])) + 
configObj.input = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'filt.ion') + configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'raw_no_projection.cor') + configObj.output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'azshift.ion') + configObj.win_min = ionParamUsrObj.ION_ionshiftFilteringWinsizeMin + configObj.win_max = ionParamUsrObj.ION_ionshiftFilteringWinsizeMax + configObj.nrlks = ionParamUsrObj.ION_numberRangeLooks + configObj.nalks = ionParamUsrObj.ION_numberAzimuthLooks + configObj.filtIonShift('[Function-1]') + configObj.finalize() + + line_cnt += 1 + #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + + def invertIonShift(self): + + ionParamUsrObj = ionParamUsr(self.param_ion) + ionParamUsrObj.configure() + + ion_in = os.path.join(self.work_dir,'ion') + ion_out = os.path.join(self.work_dir,'ion_azshift_dates') + hgt = os.path.join(self.work_dir,'merged/geom_reference/hgt.rdr') + + cmd = 'invertIon.py --idir {} --filename azshift.ion --odir {} --nrlks1 {} --nalks1 {} --nrlks2 {} --nalks2 {} --merged_geom {} --interp --msk_overlap'.format(ion_in,ion_out,ionParamUsrObj.ION_numberRangeLooks, ionParamUsrObj.ION_numberAzimuthLooks, self.rangeLooks, self.azimuthLooks,hgt) + + self.runf.write(self.text_cmd + cmd + '\n') + + + def burstRampIon(self, dates): + + ionParamUsrObj = ionParamUsr(self.param_ion) + ionParamUsrObj.configure() + + line_cnt = 0 + for p in dates: + configName = os.path.join(self.config_path,'config_burstRampIon_{}'.format(p)) + configObj = config(configName) + configObj.configure(self) + configObj.reference_stack = os.path.join(self.work_dir, 'reference') + configObj.input = os.path.join(self.work_dir, 'ion_azshift_dates', '{}.ion'.format(p)) + configObj.output = os.path.join(self.work_dir, 
'ion_burst_ramp_dates', '{}'.format(p)) + configObj.nrlks = self.rangeLooks + configObj.nalks = self.azimuthLooks + configObj.ion_height = ionParamUsrObj.ION_ionHeight + configObj.burstRampIon('[Function-1]') + configObj.finalize() + + line_cnt += 1 + #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + + def mergeBurstRampIon(self, dates, stackReferenceDate): + + line_cnt = 0 + for d in dates: + configName = os.path.join(self.config_path,'config_mergeBurstRampIon_' + d) + configObj = config(configName) + configObj.configure(self) + configObj.stack = os.path.join(self.work_dir, 'stack') + if d == stackReferenceDate: + configObj.reference = os.path.join(self.work_dir, 'reference') + else: + configObj.reference = os.path.join(self.work_dir, 'coreg_secondarys/' + d) + configObj.dirName = os.path.join(self.work_dir, 'ion_burst_ramp_dates/' + d) + configObj.namePattern = 'burst*float' + configObj.mergedFile = os.path.join(self.work_dir, 'ion_burst_ramp_merged_dates/' + d + '.float') + configObj.mergeBurstsMethod = 'top' + if d == stackReferenceDate: + configObj.aligned = 'False' + else: + configObj.aligned = 'True' + configObj.validOnly = 'True' + configObj.useVirtualFiles = 'True' + configObj.multiLook = 'True' + configObj.mergeBurst('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + def finalize(self): self.runf.close() #writeJobFile(self.run_outname) diff --git a/contrib/stack/topsStack/burstRampIon.py b/contrib/stack/topsStack/burstRampIon.py new file mode 100755 index 000000000..d2c810fcb --- /dev/null +++ b/contrib/stack/topsStack/burstRampIon.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2022 + +import os +import copy +import glob +import shutil +import argparse +import numpy as np + +import isce 
+import isceobj +import s1a_isce_utils as ut +from VRTManager import Swath +from isceobj.TopsProc.runIon import computeDopplerOffset + + +def createParser(): + parser = argparse.ArgumentParser(description='compute burst phase ramps using azimuth ionospheric shift') + parser.add_argument('-k', '--reference_stack', type=str, dest='reference_stack', required=True, + help='Directory with the reference image of the stack') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='input azimuth ionospheric shift') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='Directory with output') + parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1, + help='number of range looks of azimuth ionospheric shift. Default: 1') + parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1, + help='number of azimuth looks of azimuth ionospheric shift. Default: 1') + parser.add_argument('-t', '--ion_height', dest='ion_height', type=float, default=200.0, + help='height of ionospheric layer above the Earth surface in km. 
Default: 200.0') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + compute burst phase ramps using azimuth ionospheric shift + both regular and subband bursts are merged in a box defined by reference of stack + + ''' + inps = cmdLineParse(iargs) + + #ionospheric layer height (m) + ionHeight = inps.ion_height * 1000.0 + #earth's radius (m) + earthRadius = 6371 * 1000.0 + + img = isceobj.createImage() + img.load(inps.input + '.xml') + width = img.width + length = img.length + ionShift = np.fromfile(inps.input, dtype=np.float32).reshape(length, width) + + swathList = ut.getSwathList(inps.reference_stack) + frameReferenceAll = [ut.loadProduct(os.path.join(inps.reference_stack, 'IW{0}.xml'.format(swath))) for swath in swathList] + + refSwaths = [Swath(x) for x in frameReferenceAll] + topSwath = min(refSwaths, key = lambda x: x.sensingStart) + botSwath = max(refSwaths, key = lambda x: x.sensingStop) + leftSwath = min(refSwaths, key = lambda x: x.nearRange) + rightSwath = max(refSwaths, key = lambda x: x.farRange) + + totalWidth = int(np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1)) + totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 )) + + #!!!should CHECK if input ionospheric shift is really in this geometry + nearRange = leftSwath.nearRange + sensingStart = topSwath.sensingStart + dr = leftSwath.dr + dt = topSwath.dt + + for swath in swathList: + frameReference = ut.loadProduct(os.path.join(inps.reference_stack, 'IW{0}.xml'.format(swath))) + swathObj = Swath(frameReference) + + minBurst = frameReference.bursts[0].burstNumber + maxBurst = frameReference.bursts[-1].burstNumber + #!!!should CHECK if input ionospheric shift is really in this geometry + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + midBurstIndex = round((minBurst + maxBurst) / 
2.0) - minBurst + midBurst = frameReference.bursts[midBurstIndex] + + outputDir = os.path.join(inps.output, 'IW{0}'.format(swath)) + os.makedirs(outputDir, exist_ok=True) + + #compute mean offset: indices start from 0 + firstLine = int(np.round((swathObj.sensingStart - sensingStart).total_seconds()/(dt*inps.nalks))) + 1 + lastLine = int(np.round((swathObj.sensingStop - sensingStart).total_seconds()/(dt*inps.nalks))) - 1 + firstColumn = int(np.round((swathObj.nearRange - nearRange)/(dr*inps.nrlks))) + 1 + lastColumn = int(np.round((swathObj.farRange - nearRange)/(dr*inps.nrlks))) - 1 + ionShiftSwath = ionShift[firstLine:lastLine+1, firstColumn:lastColumn+1] + ionShiftSwathValid = ionShiftSwath[np.nonzero(ionShiftSwath!=0)] + + if ionShiftSwathValid.size == 0: + ionShiftSwathMean = 0.0 + print('mean azimuth ionospheric shift of swath {}: {} single look azimuth lines'.format(swath, ionShiftSwathMean)) + else: + ionShiftSwathMean = np.mean(ionShiftSwathValid, dtype=np.float64) + print('mean azimuth ionospheric shift of swath {}: {} single look azimuth lines'.format(swath, ionShiftSwathMean)) + + for burst in frameReference.bursts: + #compute mean offset: indices start from 0 + firstLine = int(np.round((burst.sensingStart - sensingStart).total_seconds()/(dt*inps.nalks))) + 1 + lastLine = int(np.round((burst.sensingStop - sensingStart).total_seconds()/(dt*inps.nalks))) - 1 + firstColumn = int(np.round((burst.startingRange - nearRange)/(dr*inps.nrlks))) + 1 + lastColumn = int(np.round((burst.startingRange + (burst.numberOfSamples - 1) * dr - nearRange)/(dr*inps.nrlks))) - 1 + ionShiftBurst = ionShift[firstLine:lastLine+1, firstColumn:lastColumn+1] + + ionShiftBurstValid = ionShiftBurst[np.nonzero(ionShiftBurst!=0)] + if ionShiftBurstValid.size < (lastLine - firstLine + 1) * (lastColumn - firstColumn + 1) / 2.0: + ionShiftBurstMean = 0.0 + print('mean azimuth ionospheric shift of burst {}: 0.0 single look azimuth lines'.format(burst.burstNumber)) + else: + 
#ionShiftBurstMean should use both phaseRamp1 and phaseRamp2, while + #ionShiftSwathMean should use phaseRamp2 only as in (to be consistent with) previous ESD + #The above is tested against runIon.py in topsApp.py + #ionShiftBurstMean = np.mean(ionShiftBurstValid, dtype=np.float64) - ionShiftSwathMean + ionShiftBurstMean = np.mean(ionShiftBurstValid, dtype=np.float64) + print('mean azimuth ionospheric shift of burst {}: {} single look azimuth lines'.format(burst.burstNumber, ionShiftBurstMean)) + + #compute burst phase ramps + (dopplerOffset, Ka) = computeDopplerOffset(burst, 1, burst.numberOfLines, 1, burst.numberOfSamples, nrlks=1, nalks=1) + + #satellite height + satHeight = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getPosition()) + #orgininal doppler offset should be multiplied by this ratio + ratio = ionHeight/(satHeight-earthRadius) + + #phaseRamp1 and phaseRamp2 have same sign according to Liang et. al., 2019. + #phase ramp due to imaging geometry + phaseRamp1 = dopplerOffset * burst.azimuthTimeInterval * ratio * (burst.azimuthTimeInterval * Ka[None,:] * 4.0 * np.pi) + #phase ramp due to non-zero doppler centroid frequency, ESD + phaseRamp2 = dopplerOffset * burst.azimuthTimeInterval * Ka[None,:] * 2.0 * np.pi * burst.azimuthTimeInterval + phaseRamp = (phaseRamp1 + phaseRamp2) * ionShiftBurstMean - phaseRamp2 * ionShiftSwathMean + + outfile = os.path.join(outputDir, '%s_%02d.float'%('burst', burst.burstNumber)) + + phaseRamp.astype(np.float32).tofile(outfile) + + image = isceobj.createImage() + image.setDataType('FLOAT') + image.setFilename(outfile) + image.extraFilename = outfile + '.vrt' + image.setWidth(burst.numberOfSamples) + image.setLength(burst.numberOfLines) + image.renderHdr() + + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/dloadOrbits.py b/contrib/stack/topsStack/dloadOrbits.py index af038ebe5..0073bd428 100755 --- a/contrib/stack/topsStack/dloadOrbits.py +++ b/contrib/stack/topsStack/dloadOrbits.py @@ -5,33 +5,43 @@ import argparse import glob import requests -from html.parser import HTMLParser +from types import SimpleNamespace +import json +import time fmt = '%Y%m%d' today = datetime.datetime.now().strftime(fmt) -server = 'https://scihub.copernicus.eu/gnss/' queryfmt = '%Y-%m-%d' datefmt = '%Y%m%dT%H%M%S' -#Generic credentials to query and download orbit files -credentials = ('gnssguest', 'gnssguest') - S1Astart = '20140901' S1Astart_dt = datetime.datetime.strptime(S1Astart, '%Y%m%d') S1Bstart = '20160501' S1Bstart_dt = datetime.datetime.strptime(S1Bstart, '%Y%m%d') +satName = ['S1A', 'S1B'] + def cmdLineParse(): ''' Automated download of orbits. ''' parser = argparse.ArgumentParser('S1A and 1B AUX_POEORB precise orbit downloader') - parser.add_argument('--start', '-b', dest='start', type=str, default=S1Astart, help='Start date') - parser.add_argument('--end', '-e', dest='end', type=str, default=today, help='Stop date') - parser.add_argument('--dir', '-d', dest='dirname', type=str, default='.', help='Directory with precise orbits') + parser.add_argument('--start', '-b', dest='start', type=str, default=S1Astart, + help='Start date') + parser.add_argument('--end', '-e', dest='end', type=str, default=today, + help='Stop date') + parser.add_argument('--dir', '-d', dest='dirname', type=str, default='.', + help='Directory with precise orbits') + parser.add_argument('-t', '--token-file', dest='token_file', type=str, default='.copernicus_dataspace_token', + help='Filename to save auth token file') + parser.add_argument('-u', '--username', dest='username', type=str, default=None, + help='Copernicus Data Space Ecosystem username') + parser.add_argument('-p', '--password', dest='password', type=str, default=None, + 
help='Copernicus Data Space Ecosystem password') + return parser.parse_args() @@ -59,38 +69,47 @@ def gatherExistingOrbits(dirname): for name in fnames: rangeList.append(fileToRange(name)) - print(rangeList) return rangeList -def ifAlreadyExists(indate, mission, rangeList): - ''' - Check if given time spanned by current list. - ''' - found = False - - if mission == 'S1B': - if not validS1BDate(indate): - print('Valid: ', indate) - return True - - for pair in rangeList: - if (indate > pair[0]) and (indate < pair[1]) and (mission == pair[2]): - found = True - break - - return found - - -def validS1BDate(indate): - if indate < S1Bstart_dt: - return False - else: - return True - - -def download_file(url, outdir='.', session=None): + +def get_saved_token_data(token_file): + try: + with open(token_file, 'r') as file: + return json.load(file) + except FileNotFoundError: + return None + +def save_token_data(access_token, expires_in, token_file): + token_data = { + "access_token": access_token, + "expires_at": time.time() + expires_in + } + with open(token_file, 'w') as file: + json.dump(token_data, file) + + +def is_token_valid(token_data): + if token_data and "expires_at" in token_data: + return time.time() < token_data["expires_at"] + return False + + +def get_new_token(username, password, session): + data = { + "client_id": "cdse-public", + "username": username, + "password": password, + "grant_type": "password", + } + url = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token" + response = session.post(url, data=data) + response.raise_for_status() + token_info = response.json() + return token_info["access_token"], token_info["expires_in"] + +def download_file(file_id, outdir='.', session=None, token=None): ''' Download file to specified directory. 
''' @@ -98,12 +117,16 @@ def download_file(url, outdir='.', session=None): if session is None: session = requests.session() + url = "https://zipper.dataspace.copernicus.eu/odata/v1/" + url += f"Products({file_id})/$value" + path = outdir print('Downloading URL: ', url) - request = session.get(url, stream=True, verify=True, auth=credentials) + request = session.get(url, stream=True, verify=True, + headers = {"Authorization": f"Bearer {token}"}) try: - request.raise_for_status() + val = request.raise_for_status() success = True except: success = False @@ -118,29 +141,6 @@ def download_file(url, outdir='.', session=None): return success -class MyHTMLParser(HTMLParser): - - def __init__(self,url): - HTMLParser.__init__(self) - self.fileList = [] - self._url = url - - def handle_starttag(self, tag, attrs): - for name, val in attrs: - if name == 'href': - if val.startswith("https://scihub.copernicus.eu/gnss/odata") and val.endswith(")/"): - pass - else: - downloadLink = val.strip() - downloadLink = downloadLink.split("/Products('Quicklook')") - downloadLink = downloadLink[0] + downloadLink[-1] - self._url = downloadLink - - def handle_data(self, data): - if data.startswith("S1") and data.endswith(".EOF"): - self.fileList.append((self._url, data.strip())) - - if __name__ == '__main__': ''' Main driver. 
@@ -148,6 +148,10 @@ def handle_data(self, data): # Parse command line inps = cmdLineParse() + username = inps.username + password = inps.password + token_file = os.path.expanduser(inps.token_file) + match = None ###Compute interval tstart = datetime.datetime.strptime(inps.start, fmt) @@ -158,46 +162,79 @@ def handle_data(self, data): ranges = gatherExistingOrbits(inps.dirname) - for dd in range(days): - indate = tstart + datetime.timedelta(days=dd, hours=12) - timebef = indate - datetime.timedelta(days=1) - timeaft = indate + datetime.timedelta(days=1) - timebef=str(timebef.strftime('%Y-%m-%d')) - timeaft = str(timeaft.strftime('%Y-%m-%d')) - url = server + 'search?q= ( beginPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] AND endPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] ) AND ( (platformname:Sentinel-1 AND producttype:AUX_POEORB))'.format(timebef, timeaft) - session = requests.session() + oType = 'precise' + timebef = tstart - datetime.timedelta(days=1) + timeaft = tend + datetime.timedelta(days=1) + timebef = str(timebef.strftime('%Y-%m-%d')) + timeaft = str(timeaft.strftime('%Y-%m-%d')) + + + session = requests.Session() + + start_time = timebef + "T00:00:00.000Z" + stop_time = timeaft + "T23:59:59.999Z" + url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products" + + for mission in satName: + query = " and ".join([ + f"ContentDate/Start gt '{start_time}'", + f"ContentDate/Start lt '{stop_time}'", + f"ContentDate/End gt '{start_time}'", + f"ContentDate/End lt '{stop_time}'", + f"startswith(Name,'{mission}')", + f"contains(Name,'AUX_POEORB')", + ]) + + match = None success = False - for selectMission in ['S1A', 'S1B']: - if not ifAlreadyExists(indate, selectMission, ranges): - try: - r = session.get(url, verify=True, auth=credentials) - r.raise_for_status() - parser = MyHTMLParser(url) - parser.feed(r.text) - - for resulturl, result in parser.fileList: - tbef, taft, mission = fileToRange(os.path.basename(result)) - if selectMission==mission: - 
matchFileName = result - match = resulturl - - - if match is not None: - success = True - except: - pass + try: + r = session.get(url, verify=True, params={"$filter": query}) + r.raise_for_status() + + entries = json.loads(r.text, object_hook=lambda x: SimpleNamespace(**x)).value + for entry in entries: + entry_datefmt = "%Y-%m-%dT%H:%M:%S.000000Z" + tbef = datetime.datetime.strptime(entry.ContentDate.Start, entry_datefmt) + taft = datetime.datetime.strptime(entry.ContentDate.End, entry_datefmt) + matchFileName = entry.Name + match = entry.Id if match is not None: - + token_data = get_saved_token_data(token_file) + if token_data and is_token_valid(token_data): + print("using saved access token") + token = token_data["access_token"] + else: + print("generating a new access token") + if username is None or password is None: + try: + import netrc + host = "dataspace.copernicus.eu" + creds = netrc.netrc().hosts[host] + except: + if username is None: + username = input("Username: ") + if password is None: + from getpass import getpass + password = getpass("Password (will not be displayed): ") + else: + if username is None: + username, _, _ = creds + if password is None: + _, _, password = creds + token, expires_in = get_new_token(username, password, session) + save_token_data(token, expires_in, token_file) + output = os.path.join(inps.dirname, matchFileName) - print(output) - res = download_file(match, output, session) - else: - print('Failed to find {1} orbits for tref {0}'.format(indate, selectMission)) + res = download_file(match, output, session, token) + print("Orbit is downloaded successfully") + print(" ") - else: - print('Already exists: ', selectMission, indate) + else: + print("Orbit is not downloaded successfully") + print(" ") - print('Exit dloadOrbits Successfully') + except: + raise diff --git a/contrib/stack/topsStack/fetchOrbit.py b/contrib/stack/topsStack/fetchOrbit.py index fc69ceb4e..8a4d29c2a 100755 --- a/contrib/stack/topsStack/fetchOrbit.py +++ 
b/contrib/stack/topsStack/fetchOrbit.py @@ -1,25 +1,19 @@ #!/usr/bin/env python3 -import numpy as np +from types import SimpleNamespace +import json import requests import re import os import argparse import datetime -from html.parser import HTMLParser - -server = 'https://scihub.copernicus.eu/gnss/' +import time orbitMap = [('precise', 'AUX_POEORB'), ('restituted', 'AUX_RESORB')] datefmt = "%Y%m%dT%H%M%S" queryfmt = "%Y-%m-%d" -queryfmt2 = "%Y/%m/%d/" - -#Generic credentials to query and download orbit files -credentials = ('gnssguest', 'gnssguest') - def cmdLineParse(): ''' @@ -31,6 +25,12 @@ def cmdLineParse(): help='Path to SAFE package of interest') parser.add_argument('-o', '--output', dest='outdir', type=str, default='.', help='Path to output directory') + parser.add_argument('-t', '--token-file', dest='token_file', type=str, default='.copernicus_dataspace_token', + help='Filename to save auth token file') + parser.add_argument('-u', '--username', dest='username', type=str, default=None, + help='Copernicus Data Space Ecosystem username') + parser.add_argument('-p', '--password', dest='password', type=str, default=None, + help='Copernicus Data Space Ecosystem password') return parser.parse_args() @@ -56,30 +56,44 @@ def FileToTimeStamp(safename): return tstamp, satName, sstamp -class MyHTMLParser(HTMLParser): - - def __init__(self,url): - HTMLParser.__init__(self) - self.fileList = [] - self._url = url - - def handle_starttag(self, tag, attrs): - for name, val in attrs: - if name == 'href': - if val.startswith("https://scihub.copernicus.eu/gnss/odata") and val.endswith(")/"): - pass - else: - downloadLink = val.strip() - downloadLink = downloadLink.split("/Products('Quicklook')") - downloadLink = downloadLink[0] + downloadLink[-1] - self._url = downloadLink - - def handle_data(self, data): - if data.startswith("S1") and data.endswith(".EOF"): - self.fileList.append((self._url, data.strip())) - - -def download_file(url, outdir='.', session=None): +def 
get_saved_token_data(token_file): + try: + with open(token_file, 'r') as file: + return json.load(file) + except FileNotFoundError: + return None + + +def save_token_data(access_token, expires_in, token_file): + token_data = { + "access_token": access_token, + "expires_at": time.time() + expires_in + } + with open(token_file, 'w') as file: + json.dump(token_data, file) + + +def is_token_valid(token_data): + if token_data and "expires_at" in token_data: + return time.time() < token_data["expires_at"] + return False + + +def get_new_token(username, password, session): + data = { + "client_id": "cdse-public", + "username": username, + "password": password, + "grant_type": "password", + } + url = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token" + response = session.post(url, data=data) + response.raise_for_status() + token_info = response.json() + return token_info["access_token"], token_info["expires_in"] + + +def download_file(file_id, outdir='.', session=None, token=None): ''' Download file to specified directory. ''' @@ -87,9 +101,13 @@ def download_file(url, outdir='.', session=None): if session is None: session = requests.session() + url = "https://zipper.dataspace.copernicus.eu/odata/v1/" + url += f"Products({file_id})/$value" + path = outdir print('Downloading URL: ', url) - request = session.get(url, stream=True, verify=True, auth=credentials) + request = session.get(url, stream=True, verify=True, + headers = {"Authorization": f"Bearer {token}"}) try: val = request.raise_for_status() @@ -107,25 +125,15 @@ def download_file(url, outdir='.', session=None): return success -def fileToRange(fname): - ''' - Derive datetime range from orbit file name. 
- ''' - - fields = os.path.basename(fname).split('_') - start = datetime.datetime.strptime(fields[-2][1:16], datefmt) - stop = datetime.datetime.strptime(fields[-1][:15], datefmt) - mission = fields[0] - - return (start, stop, mission) - - if __name__ == '__main__': ''' Main driver. ''' inps = cmdLineParse() + username = inps.username + password = inps.password + token_file = os.path.expanduser(inps.token_file) fileTS, satName, fileTSStart = FileToTimeStamp(inps.input) print('Reference time: ', fileTS) @@ -134,38 +142,78 @@ def fileToRange(fname): session = requests.Session() for spec in orbitMap: - oType = spec[0] delta = datetime.timedelta(days=1) timebef = (fileTS - delta).strftime(queryfmt) timeaft = (fileTS + delta).strftime(queryfmt) - url = server + 'search?q=( beginPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] AND endPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] ) AND ( (platformname:Sentinel-1 AND filename:{2}_* AND producttype:{3}))&start=0&rows=100'.format(timebef,timeaft, satName,spec[1]) - + url = "https://catalogue.dataspace.copernicus.eu/odata/v1/Products" + + start_time = timebef + "T00:00:00.000Z" + stop_time = timeaft + "T23:59:59.999Z" + + query = " and ".join([ + f"ContentDate/Start gt '{start_time}'", + f"ContentDate/Start lt '{stop_time}'", + f"ContentDate/End gt '{start_time}'", + f"ContentDate/End lt '{stop_time}'", + f"startswith(Name,'{satName}')", + f"contains(Name,'{spec[1]}')", + ]) + success = False match = None - + try: - r = session.get(url, verify=True, auth=credentials) + r = session.get(url, verify=True, params={"$filter": query}) r.raise_for_status() - parser = MyHTMLParser(url) - parser.feed(r.text) - for resulturl, result in parser.fileList: - tbef, taft, mission = fileToRange(os.path.basename(result)) + + entries = json.loads(r.text, object_hook=lambda x: SimpleNamespace(**x)).value + + for entry in entries: + entry_datefmt = "%Y-%m-%dT%H:%M:%S.000000Z" + tbef = datetime.datetime.strptime(entry.ContentDate.Start, 
entry_datefmt) + taft = datetime.datetime.strptime(entry.ContentDate.End, entry_datefmt) if (tbef <= fileTSStart) and (taft >= fileTS): - matchFileName = result - match = resulturl + matchFileName = entry.Name + match = entry.Id if match is not None: success = True except: - pass + raise if success: break if match is not None: + + token_data = get_saved_token_data(token_file) + if token_data and is_token_valid(token_data): + print("using saved access token") + token = token_data["access_token"] + else: + print("generating a new access token") + if username is None or password is None: + try: + import netrc + host = "dataspace.copernicus.eu" + creds = netrc.netrc().hosts[host] + except: + if username is None: + username = input("Username: ") + if password is None: + from getpass import getpass + password = getpass("Password (will not be displayed): ") + else: + if username is None: + username, _, _ = creds + if password is None: + _, _, password = creds + token, expires_in = get_new_token(username, password, session) + save_token_data(token, expires_in, token_file) + output = os.path.join(inps.outdir, matchFileName) - res = download_file(match, output, session) + res = download_file(match, output, session, token) if res is False: - print('Failed to download URL: ', match) + print('Failed to download orbit ID:', match) else: print('Failed to find {1} orbits for tref {0}'.format(fileTS, satName)) diff --git a/contrib/stack/topsStack/filtIonShift.py b/contrib/stack/topsStack/filtIonShift.py new file mode 100755 index 000000000..3bec81c91 --- /dev/null +++ b/contrib/stack/topsStack/filtIonShift.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2022 + +import os +import copy +import glob +import shutil +import argparse +import numpy as np + +import isce +import isceobj +from isceobj.TopsProc.runIon import adaptive_gaussian +from isceobj.TopsProc.runIon import weight_fitting +from isceobj.TopsProc.runIon import fit_surface +from 
isceobj.TopsProc.runIon import cal_surface +import s1a_isce_utils as ut + + +def createParser(): + parser = argparse.ArgumentParser(description='compute and filter azimuth ionospheric shift [unit: masBurst.azimuthTimeInterval]') + parser.add_argument('-k', '--reference_stack', type=str, dest='reference_stack', required=True, + help='Directory with the reference image of the stack') + parser.add_argument('-f', '--reference', type=str, dest='reference', required=True, + help='Directory with the reference image (coregistered or reference of the stack)') + parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True, + help='Directory with the secondary image (coregistered or reference of the stack)') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='input ionosphere') + parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True, + help='coherence, e.g. raw_no_projection.cor') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='output azimuth ionospheric shift') + parser.add_argument('-b', '--win_min', dest='win_min', type=int, default=100, + help='minimum filtering window size') + parser.add_argument('-d', '--win_max', dest='win_max', type=int, default=200, + help='maximum filtering window size') + parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1, + help='number of range looks. Default: 1') + parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1, + help='number of azimuth looks. Default: 1') + #parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None, + # help='This is a 2-d list. Each element in the 2-D list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. 
two areas masked out: --masked_areas 10 20 10 20 --masked_areas 110 120 110 120') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + check overlap among all acquistions, only keep the bursts that in the common overlap, + and then renumber the bursts. + ''' + inps = cmdLineParse(iargs) + + ''' + calculate azimuth shift caused by ionosphere using ionospheric phase + ''' + + ################################################# + #SET PARAMETERS HERE + #gaussian filtering window size + #size = np.int(np.around(width / 12.0)) + #size = ionParam.ionshiftFilteringWinsize + size_max = inps.win_max + size_min = inps.win_min + + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + #if applying polynomial fitting + #0: no fitting, 1: with fitting + fit = 0 + corThresholdIonshift = 0.85 + ################################################# + + +#################################################################### + #STEP 1. GET DERIVATIVE OF IONOSPHERE +#################################################################### + + #get files + ionfile = inps.input + #we are using filtered ionosphere, so we should use coherence file that is not projected. 
+ #corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCor) + corfile = inps.coherence + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + + ######################################################################################## + #AFTER COHERENCE IS RESAMPLED AT grd2ion, THERE ARE SOME WIRED VALUES + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + ######################################################################################## + + #get the azimuth derivative of ionosphere + dion = np.diff(ion, axis=0) + dion = np.concatenate((dion, np.zeros((1,width))), axis=0) + + #remove the samples affected by zeros + flag_ion0 = (ion!=0) + #moving down by one line + flag_ion1 = np.roll(flag_ion0, 1, axis=0) + flag_ion1[0,:] = 0 + #moving up by one line + flag_ion2 = np.roll(flag_ion0, -1, axis=0) + flag_ion2[-1,:] = 0 + #now remove the samples affected by zeros + flag_ion = flag_ion0 * flag_ion1 * flag_ion2 + dion *= flag_ion + + flag = flag_ion * (cor>corThresholdIonshift) + index = np.nonzero(flag) + + +#################################################################### + #STEP 2. 
FIT A POLYNOMIAL TO THE DERIVATIVE OF IONOSPHERE +#################################################################### + + order = 3 + + #look for data to use + point_index = np.nonzero(flag) + m = point_index[0].shape[0] + + #calculate input index matrix + x0=np.matlib.repmat(np.arange(width), length, 1) + y0=np.matlib.repmat(np.arange(length).reshape(length, 1), 1, width) + + x = x0[point_index].reshape(m, 1) + y = y0[point_index].reshape(m, 1) + z = dion[point_index].reshape(m, 1) + w = cor[point_index].reshape(m, 1) + + #convert to higher precision type before use + x=np.asfarray(x,np.float64) + y=np.asfarray(y,np.float64) + z=np.asfarray(z,np.float64) + w=np.asfarray(w,np.float64) + coeff = fit_surface(x, y, z, w, order) + + rgindex = np.arange(width) + azindex = np.arange(length).reshape(length, 1) + #convert to higher precision type before use + rgindex=np.asfarray(rgindex,np.float64) + azindex=np.asfarray(azindex,np.float64) + dion_fit = cal_surface(rgindex, azindex, coeff, order) + + #no fitting + if fit == 0: + dion_fit *= 0 + dion_res = (dion - dion_fit)*(dion!=0) + + +#################################################################### + #STEP 3. FILTER THE RESIDUAL OF THE DERIVATIVE OF IONOSPHERE +#################################################################### + + #this will be affected by low coherence areas like water, so not use this. + #filter the derivation of ionosphere + #if size % 2 == 0: + # size += 1 + #sigma = size / 2.0 + + #g2d = gaussian(size, sigma, scale=1.0) + #scale = ss.fftconvolve((dion_res!=0), g2d, mode='same') + #dion_filt = ss.fftconvolve(dion_res, g2d, mode='same') / (scale + (scale==0)) + + #minimize the effect of low coherence pixels + cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + dion_filt = adaptive_gaussian(dion_res, cor, size_max, size_min) + + dion = (dion_fit + dion_filt)*(dion!=0) + + #return dion + + +#################################################################### + #STEP 4. 
CONVERT TO AZIMUTH SHIFT +#################################################################### + + #!!! use number of swaths in reference for now + + #these are coregistered secondaries, so there should be <= swaths in reference of entire stack + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + #swathList = ut.getSwathList(inps.reference) + firstSwath = None + for swath in swathList: + frameReference = ut.loadProduct(os.path.join(inps.reference_stack, 'IW{0}.xml'.format(swath))) + + minBurst = frameReference.bursts[0].burstNumber + maxBurst = frameReference.bursts[-1].burstNumber + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + else: + firstSwath = swath + break + if firstSwath is None: + raise Exception('no valid swaths!') + + midBurstIndex = round((minBurst + maxBurst) / 2.0) - minBurst + masBurst = frameReference.bursts[midBurstIndex] + + #shift casued by ionosphere [unit: masBurst.azimuthTimeInterval] + rng = masBurst.rangePixelSize * ((np.arange(width))*inps.nrlks + (inps.nrlks - 1.0) / 2.0) + masBurst.startingRange + Ka = masBurst.azimuthFMRate(rng) + ionShift = dion / (masBurst.azimuthTimeInterval * inps.nalks) / (4.0 * np.pi) / Ka[None, :] / masBurst.azimuthTimeInterval + + #output + outfile = inps.output + os.makedirs(os.path.dirname(outfile), exist_ok=True) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = ionShift + ion.astype(np.float32).tofile(outfile) + img.filename = outfile + img.extraFilename = outfile + '.vrt' + img.renderHdr() + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/invertIon.py b/contrib/stack/topsStack/invertIon.py index b3e5f1e9f..43abd5f19 100755 --- a/contrib/stack/topsStack/invertIon.py +++ b/contrib/stack/topsStack/invertIon.py @@ -96,6 +96,8 @@ def cmdLineParse(): parser = argparse.ArgumentParser(description='least squares estimation') parser.add_argument('--idir', dest='idir', type=str, required=True, help = 'input directory where each pair (YYYYMMDD_YYYYMMDD) is located. only folders are recognized') + parser.add_argument('--filename', dest='filename', type=str, default='filt.ion', + help = 'input file name. default: filt.ion') parser.add_argument('--odir', dest='odir', type=str, required=True, help = 'output directory for estimated result of each date') parser.add_argument('--zro_date', dest='zro_date', type=str, default=None, @@ -205,7 +207,7 @@ def cmdLineParse(): ndate = len(dates) npair = len(pairs) - ionfile = os.path.join(idir, pairs[0], 'ion_cal', 'filt.ion') + ionfile = os.path.join(idir, pairs[0], 'ion_cal', inps.filename) img = isceobj.createImage() img.load(ionfile+'.xml') @@ -219,7 +221,7 @@ def cmdLineParse(): wls = False stdPairs = np.ones((npair, length, width), dtype=np.float32) for i in range(npair): - ionfile = os.path.join(idir, pairs[i], 'ion_cal', 'filt.ion') + ionfile = os.path.join(idir, pairs[i], 'ion_cal', inps.filename) ionPairs[i, :, :] = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] #flag of valid/invalid is defined by amplitde image amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] diff --git a/contrib/stack/topsStack/ion_param.txt b/contrib/stack/topsStack/ion_param.txt index 3896804dd..a01bbbbf9 100644 --- a/contrib/stack/topsStack/ion_param.txt +++ b/contrib/stack/topsStack/ion_param.txt @@ -2,6 +2,9 @@ ###the values below are the default values used by the module ###remove # to set the parameters +#consider burst properties in 
ionosphere computation: False +#height of ionosphere layer in km: 200.0 + #maximum window size for filtering ionosphere phase: 200 #minimum window size for filtering ionosphere phase: 100 #maximum window size for filtering ionosphere azimuth shift: 150 diff --git a/contrib/stack/topsStack/mergeBursts.py b/contrib/stack/topsStack/mergeBursts.py index 964cb8c03..651ca5886 100755 --- a/contrib/stack/topsStack/mergeBursts.py +++ b/contrib/stack/topsStack/mergeBursts.py @@ -18,6 +18,7 @@ from isceobj.Util.ImageUtil import ImageLib as IML from isceobj.Util.decorators import use_api import s1a_isce_utils as ut +from isce.applications.gdal2isce_xml import gdal2isce_xml def createParser(): @@ -313,6 +314,7 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d gdal.Translate(outname, ds, options=options_str) # generate VRT file gdal.Translate(outname+".vrt", outname, options='-of VRT') + gdal2isce_xml(outname) else: from mroipac.looks.Looks import Looks diff --git a/contrib/stack/topsStack/plotIonDates.py b/contrib/stack/topsStack/plotIonDates.py index ea641682c..fcfa59fed 100755 --- a/contrib/stack/topsStack/plotIonDates.py +++ b/contrib/stack/topsStack/plotIonDates.py @@ -86,7 +86,7 @@ def cmdLineParse(): for ipair in pairs: ion = os.path.join(idir, ipair + '.ion') runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, odir)) - runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( + runCmd("montage -font DejaVu-Sans -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( int((ratio*width)/111*9+0.5), ipair, os.path.join(odir, 'out.ppm'), diff --git a/contrib/stack/topsStack/plotIonPairs.py b/contrib/stack/topsStack/plotIonPairs.py index 5e695a7ce..966917171 100755 --- a/contrib/stack/topsStack/plotIonPairs.py +++ b/contrib/stack/topsStack/plotIonPairs.py @@ -86,7 +86,7 @@ def cmdLineParse(): for ipair in pairs: ion = 
os.path.join(idir, ipair, 'ion_cal', 'filt.ion') runCmd('mdx {} -s {} -rhdr {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, width*4, odir)) - runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( + runCmd("montage -font DejaVu-Sans -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( int((ratio*width)/111*9+0.5), ipair, os.path.join(odir, 'out.ppm'), diff --git a/contrib/stack/topsStack/s1_select_ion.py b/contrib/stack/topsStack/s1_select_ion.py index ec517d1a5..75d705517 100755 --- a/contrib/stack/topsStack/s1_select_ion.py +++ b/contrib/stack/topsStack/s1_select_ion.py @@ -422,8 +422,8 @@ def cmdLineParse(): parser = argparse.ArgumentParser( description='select Sentinel-1A/B acquistions good for ionosphere correction. not used slices are moved to folder: not_used') parser.add_argument('-dir', dest='dir', type=str, required=True, help = 'directory containing the "S1*_IW_SLC_*.zip" files') - parser.add_argument('-sn', dest='sn', type=str, required=True, - help='south/north bound of area of interest, format: south/north') + parser.add_argument('-sn', dest='sn', type=float, nargs=2, required=True, + help='south and north bound of area of interest, format: -sn south north') parser.add_argument('-nr', dest='nr', type=int, default=10, help = 'minimum number of acquisitions for same starting ranges. 
default: 10') @@ -438,7 +438,8 @@ def cmdLineParse(): if __name__ == '__main__': inps = cmdLineParse() - s,n=[float(x) for x in inps.sn.split('/')] + #s,n=[float(x) for x in inps.sn.split('/')] + s,n=inps.sn #group the slices group = get_group(inps.dir) diff --git a/contrib/stack/topsStack/stackSentinel.py b/contrib/stack/topsStack/stackSentinel.py index ef92360e0..24639d189 100755 --- a/contrib/stack/topsStack/stackSentinel.py +++ b/contrib/stack/topsStack/stackSentinel.py @@ -14,7 +14,7 @@ import isce import isceobj from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1 -from topsStack.Stack import config, run, sentinelSLC +from topsStack.Stack import config, run, sentinelSLC, ionParamUsr helpstr = """ @@ -607,8 +607,9 @@ def checkCurrentStatusIonosphere(inps): acquisitionDates, stackReferenceDate, secondaryDates, safe_dict = get_dates(inps) pairs_same_starting_ranges, pairs_diff_starting_ranges = selectNeighborPairsIonosphere(safe_dict, inps.num_connections_ion) + dateListIon = getDatesIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges) pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update = excludeExistingPairsIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges, inps.work_dir) - dateListIon = getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) + dateListIonUpdate = getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) #report pairs of different swath starting ranges. 
pdiff = 'ionosphere phase estimation pairs with different swath starting ranges\n' @@ -623,7 +624,7 @@ def checkCurrentStatusIonosphere(inps): with open('pairs_diff_starting_ranges.txt', 'w') as f: f.write(pdiff) - return dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict + return dateListIonUpdate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, dateListIon ######################################## @@ -821,12 +822,12 @@ def offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe return i -def ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i): +def ionosphereStack(inps, dateListIonUpdate, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i): i+=1 runObj = run() runObj.configure(inps, 'run_{:02d}_subband_and_resamp'.format(i)) - runObj.subband_and_resamp(dateListIon, stackReferenceDate) + runObj.subband_and_resamp(dateListIonUpdate, stackReferenceDate) runObj.finalize() i+=1 @@ -871,6 +872,38 @@ def ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_r runObj.invertIon() runObj.finalize() + + ionParamUsrObj = ionParamUsr(inps.param_ion) + ionParamUsrObj.configure() + if ionParamUsrObj.ION_considerBurstProperties: + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_filtIonShift'.format(i)) + runObj.filtIonShift(pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update) + runObj.finalize() + + #From now on, need to reprocess all dates/pairs, not only newly added + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_invertIonShift'.format(i)) + runObj.invertIonShift() + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_burstRampIon'.format(i)) + runObj.burstRampIon(dateListIon) + runObj.finalize() + + i+=1 + runObj = run() + 
runObj.configure(inps, 'run_{:02d}_mergeBurstRampIon'.format(i)) + runObj.mergeBurstRampIon(dateListIon, stackReferenceDate) + runObj.finalize() + + del ionParamUsrObj + + return i @@ -1015,8 +1048,19 @@ def main(iargs=None): elif not os.path.isfile(inps.param_ion): print("Ion parameter file is missing. Ionospheric estimation will not be done.") else: - dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict = checkCurrentStatusIonosphere(inps) - i = ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i) + ############################################# + #check if consider burst properties + ionParamUsrObj = ionParamUsr(inps.param_ion) + ionParamUsrObj.configure() + if ionParamUsrObj.ION_considerBurstProperties: + if inps.coregistration != 'NESD': + raise Exception("Coregistration options '-C', '--coregistration' must be NESD when 'consider burst properties in ionosphere computation' is True") + del ionParamUsrObj + ############################################# + + dateListIonUpdate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, dateListIon = checkCurrentStatusIonosphere(inps) + i = ionosphereStack(inps, dateListIonUpdate, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i) + if __name__ == "__main__": # Main engine diff --git a/docker/Dockerfile b/docker/Dockerfile index 7617352f8..3581314ad 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -64,3 +64,5 @@ RUN set -ex \ COPY --from=builder /tmp/isce*.deb /tmp/isce2.deb RUN dpkg -i /tmp/isce2.deb + +RUN ln -s /usr/lib/python3.8/dist-packages/isce2 /usr/lib/python3.8/dist-packages/isce diff --git a/examples/input_files/alos2/alos2App.xml b/examples/input_files/alos2/alos2App.xml index 6d8c5e5ba..2f51bacbc 100644 --- a/examples/input_files/alos2/alos2App.xml +++ 
b/examples/input_files/alos2/alos2App.xml @@ -10,7 +10,7 @@ ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + + ../orbits/LT1A_20230228143413185_V20230209T235500_20230211T000500_ABSORBIT_SCIE.xml + + + reference + +