
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 4, 2023
1 parent 1e33bea commit 3cc4db4
Showing 14 changed files with 0 additions and 26 deletions.
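
Every hunk below makes the same fix: it deletes a stray blank line sitting at the top of a block, immediately after a def, for, with, else:, or if __name__ == '__main__': header. Removing empty lines at the beginning of a block is the kind of whitespace normalization that black performs, so a minimal .pre-commit-config.yaml along these lines would produce autofixes like this commit (the hook choice and pinned rev are assumptions for illustration; the repository's actual configuration is not shown in this commit):

repos:
  - repo: https://github.com/psf/black
    rev: 23.3.0  # illustrative pin only; not confirmed from this commit
    hooks:
      - id: black

pre-commit.ci runs the configured hooks on each pull request and, when a hook modifies files, pushes the result back as an autofix commit like this one.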
2 changes: 0 additions & 2 deletions carbonplan_trace/fire/grid_viirs_workflow.py
@@ -35,7 +35,6 @@ def calc_buffer_distance(lats, buffer_m=375):
 
 
 def rasterize_geom(geoms, transform, shape):
-
     r = rasterize(
         [(geom, 1) for geom in geoms],
         out_shape=shape,
@@ -85,7 +84,6 @@ def open_target_grid(tile_id):
 
 
 def process_one_year(year):
-
     print(year)
     gdf = geopandas.read_parquet(
         f's3://carbonplan-climatetrace/inputs/processed/viirs/{year}.parquet'
1 change: 0 additions & 1 deletion carbonplan_trace/metadata.py
@@ -8,7 +8,6 @@
 
 
 def get_cf_global_attrs(**attrs):
-
     if 'history' not in attrs:
         attrs['history'] = 'Created: {}'.format(time.ctime(time.time()))
 
5 changes: 0 additions & 5 deletions carbonplan_trace/v0/workflow.py
@@ -78,7 +78,6 @@ def process_one_tile(tile_id):
 
     # calc emissions
     if not (skip_existing and zarr_is_complete(tot_mapper) and zarr_is_complete(split_mapper)):
-
         lat, lon = tile_id.split('_')
         fire_da = open_fire_mask(tile_id).fillna(0)
         change_ds = open_hansen_change_tile(lat, lon)
@@ -112,7 +111,6 @@ def process_one_tile(tile_id):
         (tot_mapper, coarse_tot_mapper, tot_encoding),
         (split_mapper, coarse_split_mapper, split_encoding),
     ]:
-
         if not (skip_existing and zarr_is_complete(out_mapper)):
             ds = xr.open_zarr(in_mapper, consolidated=True)
 
@@ -134,7 +132,6 @@ def combine_all_tiles(encoding_kinds=None):
         encoding_kinds = [('tot', tot_encoding), ('split', split_encoding)]
     print('combining all tiles')
     for kind, encoding in encoding_kinds:
-
         mapper = fsspec.get_mapper(coarse_full_template.format(kind=kind))
 
         if not (skip_existing and zarr_is_complete(mapper)):
@@ -169,13 +166,11 @@ def rollup_shapes():
         ('tot', ['emissions']),
         ('split', ['emissions_from_clearing', 'emissions_from_fire']),
     ]:
-
         ds = xr.open_zarr(coarse_full_template.format(kind=kind), consolidated=True)
 
         mask = regionmask.mask_geopandas(shapes_df, ds['lon'], ds['lat'], numbers='numbers')
 
         for var in var_names:
-
             # this will trigger dask compute
             df = ds[var].groupby(mask).sum().to_pandas()
 
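
An aside on the context lines above: each expensive write in this workflow is guarded by if not (skip_existing and zarr_is_complete(mapper)):, so re-runs skip outputs that already finished. A minimal sketch of what such a completeness check could look like (the real zarr_is_complete is defined elsewhere in this repository; the body below is an assumption, not its actual implementation):

def zarr_is_complete(mapper, check='.zmetadata'):
    # Assumption: a Zarr store counts as complete once its consolidated
    # metadata key exists, since that is written last on a successful save.
    return check in mapper

Under that assumption, the guard makes the tile-processing loops idempotent: an interrupted run can be restarted without recomputing or re-writing finished stores.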
3 changes: 0 additions & 3 deletions carbonplan_trace/v1/emissions_workflow.py
@@ -207,7 +207,6 @@ def coarsen_tile(tile_id):
     for in_mapper, out_mapper, encoding in [
         (split_mapper, coarse_split_mapper, split_encoding),
     ]:
-
         if not (skip_existing and zarr_is_complete(out_mapper)):
             ds = utils.open_result_tile(
                 tile_id, variable='emissions', version='v1.2', resolution='30m', apply_masks=True
@@ -233,7 +232,6 @@ def combine_all_tiles(encoding_kinds=None):
     print('combining all tiles')
 
     for kind, encoding in encoding_kinds:
-
         mapper = fsspec.get_mapper(coarse_full_template.format(kind=kind))
 
         if not (skip_existing and zarr_is_complete(mapper)):
@@ -270,7 +268,6 @@ def rollup_shapes():
     var_names = ['emissions_from_clearing', 'emissions_from_fire', 'sinks', 'net']
 
     for var in var_names:
-
         # this will trigger dask compute
         df = ds[var].groupby(mask).sum().to_pandas()
 
2 changes: 0 additions & 2 deletions carbonplan_trace/v1/landsat_preprocess.py
@@ -265,7 +265,6 @@ def grab_scene_coord_info(metadata):
 
 
 def get_scene_utm_info(url, json_client):
-
     '''
     Get the USGS-provided UTM zone and letter for the specific landsat scene
@@ -330,7 +329,6 @@ def calc_NDVI(ds):
 
 
 def calc_NDII(ds):
-
     '''
     Calculate NDII (Hardisky et al, 1984) based upon bands 4 and 5. *Note* only valid
     for landsat 5 and 7 right now.
2 changes: 0 additions & 2 deletions carbonplan_trace/v1/postprocess.py
@@ -413,7 +413,6 @@ def postprocess_subtile(parameters_dict):
 
 
 def test_to_zarr(parameters_dict):
-
     lat_increment = parameters_dict['LAT_INCREMENT']
     lon_increment = parameters_dict['LON_INCREMENT']
     year0 = parameters_dict['YEAR_0']
@@ -438,7 +437,6 @@ def test_to_zarr(parameters_dict):
     data_mapper = fs.get_mapper(data_path)
 
     with rio.Env(aws_session):
-
         da = xr.DataArray(
             np.ones((8000, 8000, 2)),
             coords={
2 changes: 0 additions & 2 deletions carbonplan_trace/v1/training_prep.py
@@ -203,7 +203,6 @@ def add_parquet_urls(df):
 
 
 def find_pertinent_scenes_for_shots(lat_tag, lon_tag, scenes_in_tile_gdf):
-
     file_mapper = fs.get_mapper(
         "carbonplan-climatetrace/v2/data/intermediates/biomass/{}_{}.zarr".format(lat_tag, lon_tag)
     )
@@ -235,7 +234,6 @@ def find_pertinent_scenes_for_shots(lat_tag, lon_tag, scenes_in_tile_gdf):
 def aggregate_parquet_files(
     lat_tag, lon_tag, all_parquet_files, write=True, access_key_id=None, secret_access_key=None
 ):
-
     full_df = None
     for url in all_parquet_files:
         df = pd.read_parquet(url)
1 change: 0 additions & 1 deletion notebooks/analysis/validate_biomass.ipynb
@@ -364,7 +364,6 @@
     "    cbar_label=\"\",\n",
     "    cmap=\"viridis\",\n",
     "):\n",
-    "\n",
     "    cax = fig.add_axes([x_location, y_location, width, height])\n",
     "    cax.text(\n",
     "        0.5,\n",
1 change: 0 additions & 1 deletion notebooks/analysis/validate_biomass_training_data.ipynb
@@ -889,7 +889,6 @@
     "    cbar_label=\"\",\n",
     "    cmap=\"viridis\",\n",
     "):\n",
-    "\n",
     "    cax = fig.add_axes([x_location, y_location, width, height])\n",
     "    cax.text(\n",
     "        0.5,\n",
2 changes: 0 additions & 2 deletions notebooks/analysis/validate_height_metric.ipynb
@@ -236,7 +236,6 @@
    "outputs": [],
    "source": [
     "def merge_ds(study, ref, var_name, precision=3):\n",
-    "\n",
     "    variables_study = [\n",
     "        var for var in list(study.variables.keys()) if var_name.lower() in var.lower()\n",
     "    ]\n",
@@ -526,7 +525,6 @@
     "}\n",
     "\n",
     "for var in [\"QMCH\", \"MeanH\"]:\n",
-    "\n",
     "    joined = merge_ds(study=sub, ref=margolis, var_name=var, precision=3)\n",
     "\n",
     "    plt.figure(figsize=(13, 4.5))\n",
1 change: 0 additions & 1 deletion notebooks/processing/change_point_detection.ipynb
@@ -235,7 +235,6 @@
     "            # if this subtile has already been ran, continue\n",
     "            continue\n",
     "        else:\n",
-    "\n",
     "            increment_parameters = prefect_parameters.copy()\n",
     "            increment_parameters[\"LAT_INCREMENT\"] = lat_increment\n",
     "            increment_parameters[\"LON_INCREMENT\"] = lon_increment\n",
2 changes: 0 additions & 2 deletions notebooks/processing/model.ipynb
@@ -110,7 +110,6 @@
     "\n",
     "\n",
     "def get_all_prediction_result(model, df_train, df_test, df_val):\n",
-    "\n",
     "    df_train[\"biomass_pred\"] = model._predict(df_train)\n",
     "    df_test[\"biomass_pred\"] = model._predict(df_test)\n",
     "    df_val[\"biomass_pred\"] = model._predict(df_val)\n",
@@ -687,7 +686,6 @@
     "\n",
     "    # this for loop is for running different parameter sets in HPO\n",
     "    for params in [{}]:\n",
-    "\n",
     "        # instantiating the model also does .fit\n",
     "        # this will load the model if it already exist and overwrite=False, and fit the model if overwrite=True or the model does not exist\n",
     "        model = model_class(\n",
1 change: 0 additions & 1 deletion scripts/cache_glas.py
@@ -295,7 +295,6 @@ def cmr_download(urls, cache_location, credentials=None):
 
 
 def main():
-
     url_list = cmr_search(
         short_name,
         version,
1 change: 0 additions & 1 deletion scripts/preprocess_glas.py
@@ -145,7 +145,6 @@ def main(products=['GLAH01', 'GLAH14']):
 
 
 if __name__ == '__main__':
-
     client = Client(n_workers=12)
     print(client)
     print(client.dashboard_link)
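
Taken together, the 26 deletions are one mechanical fix applied 26 times. Using the first hunk's code, the before/after pattern is:

# before: a blank line opens the function body
def process_one_year(year):

    print(year)

# after: the blank line at the beginning of the block is removed
def process_one_year(year):
    print(year)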
