Skip to content
Snippets Groups Projects
Commit 44c1aad7 authored by Alex Rojas's avatar Alex Rojas
Browse files

updated CWD in main.py file to be 'os.path.dirname(os.path.abspath(__file__))'

parent 4c3ff98c
No related branches found
No related tags found
No related merge requests found
...@@ -21,8 +21,8 @@ from pgap import GapDS, wf_smooth ...@@ -21,8 +21,8 @@ from pgap import GapDS, wf_smooth
# import custom functions, etc. # import custom functions, etc.
from download_gedi import download_gedi from download_gedi import download_gedi
## GET CONSTANTS ## GET CWD of file to locate path
CWD = os.getcwd() CWD = os.path.dirname(os.path.abspath(__file__))
## Function to return beam dataframe with some meta data ## Function to return beam dataframe with some meta data
def get_beam_gdf(beam,l1b_ds,l2a_ds): def get_beam_gdf(beam,l1b_ds,l2a_ds):
......
%% Cell type:code id:25172537-794b-4135-bb28-63c0fcefbb68 tags: %% Cell type:code id:25172537-794b-4135-bb28-63c0fcefbb68 tags:
``` python ``` python
# Standard library used for path handling and granule-name matching.
import os
import re

# MAAP platform client (job submission API).
from maap.maap import MAAP

# Connect to the MAAP ops host.
maap = MAAP(maap_host='api.maap-project.org')
``` ```
%% Output %% Output
Unable to load config file from source maap.cfg Unable to load config file from source maap.cfg
Unable to load config file from source ./maap.cfg Unable to load config file from source ./maap.cfg
Unable to load config file from source /projects/maap.cfg Unable to load config file from source /projects/maap.cfg
%% Cell type:code id:f176e75b-5104-478b-ade6-a84006e966b0 tags: %% Cell type:code id:f176e75b-5104-478b-ade6-a84006e966b0 tags:
``` python ``` python
``` ```
%% Cell type:code id:b39e76ac-d765-4b8c-baf1-fe41f3296de9 tags: %% Cell type:code id:b39e76ac-d765-4b8c-baf1-fe41f3296de9 tags:
``` python ``` python
# Text files listing GEDI L1B / L2A granule URLs, one URL per line.
l1b_urls_fpath = "../GEDI-L1B-2021-URLS.txt"
l2a_urls_fpath = "../GEDI-L2A-2021-URLS.txt"

# FIX: strip trailing newlines -- readlines() keeps them, and the embedded
# '\n' was passed straight through as L1B_URL (the failed-job stderr shows
# the shell argument '...V002.h5\n'), which broke the DPS run.
with open(l1b_urls_fpath) as f:
    l1b_fpaths = [line.strip() for line in f if line.strip()]
with open(l2a_urls_fpath) as f:
    l2a_fpaths = [line.strip() for line in f if line.strip()]

# Pair each L1B granule with its L2A counterpart via the 13-digit
# acquisition timestamp in the filename, then submit one DPS job per pair.
jobs_list = []
counter = 1
for l1b_fp in l1b_fpaths:
    # 13-digit timestamp shared by the L1B/L2A filenames.
    timestamps = re.findall(r"[0-9]{13}", os.path.basename(l1b_fp))
    if not timestamps:
        # FIX: original bare `except:` (around an IndexError) swallowed
        # every error; test the empty case explicitly instead.
        print("No matching L2 file!", l1b_fp)
        continue
    str_pattern = timestamps[0]
    l2a_matches = [s for s in l2a_fpaths if str_pattern in s]
    if not l2a_matches:
        print("No matching L2 file!", l1b_fp)
        continue
    l2a_fp = l2a_matches[0]
    print(l1b_fp)
    print(l2a_fp)
    job = maap.submitJob(identifier="single_file_test_run",
                         algo_id="arojas_biomass_gedi_conus",
                         version="master",
                         username="arojearthdata",
                         queue="maap-dps-worker-8gb",
                         L1B_URL=l1b_fp,
                         L2A_URL=l2a_fp)
    jobs_list.append(job)
    # FIX: progress message once every 20 files -- `if counter%20:` fired
    # on every count NOT divisible by 20.
    if counter % 20 == 0:
        print("on file num: ", counter)
    counter += 1
    # Single-file test run: stop after the first submission.
    break
``` ```
%% Output %% Output
https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5 https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5
https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5 https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5
on file num: 1 on file num: 1
%% Cell type:code id:8f0b9f73-ffb1-40b7-be0e-b31480571a4a tags: %% Cell type:code id:8f0b9f73-ffb1-40b7-be0e-b31480571a4a tags:
``` python ``` python
# Peek at the first (up to two) submitted job records.
print(jobs_list[:2])
``` ```
%% Output %% Output
[{'job_id': 'ba87642f-df6e-47f5-b509-0d94ca990b2d', 'status': 'Accepted', 'machine_type': None, 'architecture': None, 'machine_memory_size': None, 'directory_size': None, 'operating_system': None, 'job_start_time': None, 'job_end_time': None, 'job_duration_seconds': None, 'cpu_usage': None, 'cache_usage': None, 'mem_usage': None, 'max_mem_usage': None, 'swap_usage': None, 'read_io_stats': None, 'write_io_stats': None, 'sync_io_stats': None, 'async_io_stats': None, 'total_io_stats': None, 'error_details': None, 'response_code': 200, 'outputs': []}] [{'job_id': '4ad5a4ee-4859-47aa-b825-d2a85562129c', 'status': 'Accepted', 'machine_type': None, 'architecture': None, 'machine_memory_size': None, 'directory_size': None, 'operating_system': None, 'job_start_time': None, 'job_end_time': None, 'job_duration_seconds': None, 'cpu_usage': None, 'cache_usage': None, 'mem_usage': None, 'max_mem_usage': None, 'swap_usage': None, 'read_io_stats': None, 'write_io_stats': None, 'sync_io_stats': None, 'async_io_stats': None, 'total_io_stats': None, 'error_details': None, 'response_code': 200, 'outputs': []}]
%% Cell type:code id:7fe1c6f4-37c5-4fe7-b9ff-51e17b7a5cf0 tags: %% Cell type:code id:7fe1c6f4-37c5-4fe7-b9ff-51e17b7a5cf0 tags:
``` python ``` python
# Poll the status of the first submitted job only, then stop.
for submitted in jobs_list:
    print(submitted.retrieve_status())
    print(submitted.id)
    break
``` ```
%% Output %% Output
Accepted Failed
ba87642f-df6e-47f5-b509-0d94ca990b2d 4ad5a4ee-4859-47aa-b825-d2a85562129c
%% Cell type:code id:2018077f-9640-4e70-bd01-a9d939b3626a tags: %% Cell type:code id:2018077f-9640-4e70-bd01-a9d939b3626a tags:
``` python ``` python
# Re-check job status (same single-job peek as the cell above).
for submitted in jobs_list:
    status = submitted.retrieve_status()
    print(status)
    print(submitted.id)
    break
``` ```
%% Output %% Output
Failed Failed
ba87642f-df6e-47f5-b509-0d94ca990b2d ba87642f-df6e-47f5-b509-0d94ca990b2d
%% Cell type:code id:873e9ea7-d6cb-4bb9-b082-5bffb4026615 tags: %% Cell type:code id:873e9ea7-d6cb-4bb9-b082-5bffb4026615 tags:
``` python ``` python
# Fetch the result listing (output URLs / triage traceback) for the
# first job only, then stop.
for submitted in jobs_list:
    print(submitted.retrieve_result())
    break
``` ```
%% Output %% Output
['http://maap-ops-workspace.s3-website-us-west-2.amazonaws.com/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/ba87642f-df6e-47f5-b509-0d94ca990b2d', 's3://s3-us-west-2.amazonaws.com:80/maap-ops-workspace/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/ba87642f-df6e-47f5-b509-0d94ca990b2d', 'https://s3.console.aws.amazon.com/s3/buckets/maap-ops-workspace/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/ba87642f-df6e-47f5-b509-0d94ca990b2d/?region=us-east-1&tab=overview', '+ export HOME=/root\n+ HOME=/root\n+ exec /docker-stats-on-exit-shim _docker_stats.json /app/dps_wrapper.sh /app/biomass-gedi-conus/run.sh \'https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5\n\' https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5\n+ /app/biomass-gedi-conus/run.sh \'https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5\n\' https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5\nMatplotlib created a temporary cache directory at /tmp/matplotlib-ej7h32oy because the default path (/root/.config/matplotlib) is not a writable directory; it is highly recommended to set the MPLCONFIGDIR environment variable to a writable directory, in particular to speed up the import of Matplotlib and to better support multiprocessing.\nFontconfig error: No writable cache directories\nTraceback (most recent call last):\n File "fiona/ogrext.pyx", line 136, in fiona.ogrext.gdal_open_vector\n File "fiona/_err.pyx", line 291, in fiona._err.exc_wrap_pointer\nfiona._err.CPLE_OpenFailedError: /data/work/jobs/2023/09/27/04/01/job-arojas_biomass_gedi_conus__master-20230927T035526.152033Z/./NEON_Domains/NEON_Domains.shp: No such file or directory\n\nDuring handling of the above 
exception, another exception occurred:\n\nTraceback (most recent call last):\n File "/app/biomass-gedi-conus/main.py", line 110, in <module>\n domain_polys = gpd.read_file(domain_poly_fp)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/geopandas/io/file.py", line 281, in _read_file\n return _read_file_fiona(\n ^^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/geopandas/io/file.py", line 322, in _read_file_fiona\n with reader(path_or_bytes, **kwargs) as features:\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/env.py", line 457, in wrapper\n return f(*args, **kwds)\n ^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/__init__.py", line 292, in open\n colxn = Collection(\n ^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/collection.py", line 243, in __init__\n self.session.start(self, **kwargs)\n File "fiona/ogrext.pyx", line 588, in fiona.ogrext.Session.start\n File "fiona/ogrext.pyx", line 143, in fiona.ogrext.gdal_open_vector\nfiona.errors.DriverError: /data/work/jobs/2023/09/27/04/01/job-arojas_biomass_gedi_conus__master-20230927T035526.152033Z/./NEON_Domains/NEON_Domains.shp: No such file or directory\n+ cp _stderr.txt _alt_traceback.txt\n'] ['http://maap-ops-workspace.s3-website-us-west-2.amazonaws.com/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/4ad5a4ee-4859-47aa-b825-d2a85562129c', 's3://s3-us-west-2.amazonaws.com:80/maap-ops-workspace/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/4ad5a4ee-4859-47aa-b825-d2a85562129c', 'https://s3.console.aws.amazon.com/s3/buckets/maap-ops-workspace/dataset/triaged_job/arojas_biomass_gedi_conus/master/2023/09/27/4ad5a4ee-4859-47aa-b825-d2a85562129c/?region=us-east-1&tab=overview', '+ export HOME=/root\n+ HOME=/root\n+ exec /docker-stats-on-exit-shim _docker_stats.json /app/dps_wrapper.sh 
/app/biomass-gedi-conus/run.sh \'https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5\n\' https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5\n+ /app/biomass-gedi-conus/run.sh \'https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI01_B.002/2021.05.31/GEDI01_B_2021151223415_O13976_02_T00676_02_005_02_V002.h5\n\' https://e4ftl01.cr.usgs.gov//GEDI_L1_L2/GEDI/GEDI02_A.002/2021.05.31/GEDI02_A_2021151223415_O13976_02_T00676_02_003_02_V002.h5\nMatplotlib created a temporary cache directory at /tmp/matplotlib-u_3gfo0a because the default path (/root/.config/matplotlib) is not a writable directory; it is highly recommended to set the MPLCONFIGDIR environment variable to a writable directory, in particular to speed up the import of Matplotlib and to better support multiprocessing.\nFontconfig error: No writable cache directories\nTraceback (most recent call last):\n File "fiona/ogrext.pyx", line 136, in fiona.ogrext.gdal_open_vector\n File "fiona/_err.pyx", line 291, in fiona._err.exc_wrap_pointer\nfiona._err.CPLE_OpenFailedError: /data/work/jobs/2023/09/27/05/17/job-arojas_biomass_gedi_conus__master-20230927T051209.837755Z/NEON_Domains/NEON_Domains.shp: No such file or directory\n\nDuring handling of the above exception, another exception occurred:\n\nTraceback (most recent call last):\n File "/app/biomass-gedi-conus/main.py", line 108, in <module>\n domain_polys = gpd.read_file(domain_poly_fp)\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/geopandas/io/file.py", line 281, in _read_file\n return _read_file_fiona(\n ^^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/geopandas/io/file.py", line 322, in _read_file_fiona\n with reader(path_or_bytes, **kwargs) as features:\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n File 
"/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/env.py", line 457, in wrapper\n return f(*args, **kwds)\n ^^^^^^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/__init__.py", line 292, in open\n colxn = Collection(\n ^^^^^^^^^^^\n File "/opt/conda/envs/osgeo-env/lib/python3.11/site-packages/fiona/collection.py", line 243, in __init__\n self.session.start(self, **kwargs)\n File "fiona/ogrext.pyx", line 588, in fiona.ogrext.Session.start\n File "fiona/ogrext.pyx", line 143, in fiona.ogrext.gdal_open_vector\nfiona.errors.DriverError: /data/work/jobs/2023/09/27/05/17/job-arojas_biomass_gedi_conus__master-20230927T051209.837755Z/NEON_Domains/NEON_Domains.shp: No such file or directory\n+ cp _stderr.txt _alt_traceback.txt\n']
%% Cell type:code id:e8a40e13-dcdd-4b7f-a3b6-d060bb52eaf5 tags: %% Cell type:code id:e8a40e13-dcdd-4b7f-a3b6-d060bb52eaf5 tags:
``` python ``` python
``` ```
%% Cell type:code id:988420a0-1145-499c-927c-eec059c0dde9 tags: %% Cell type:code id:988420a0-1145-499c-927c-eec059c0dde9 tags:
``` python ``` python
``` ```
%% Cell type:code id:31626d2a-4edf-4572-81c2-7b68080e2dd3 tags: %% Cell type:code id:31626d2a-4edf-4572-81c2-7b68080e2dd3 tags:
``` python ``` python
``` ```
%% Cell type:code id:9ecb7ffc-319f-40c9-8f6e-d18b6ea1c890 tags: %% Cell type:code id:9ecb7ffc-319f-40c9-8f6e-d18b6ea1c890 tags:
``` python ``` python
``` ```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment