Skip to content

Commit 0be247c

Browse files
Merge pull request #15 from supathdhitalGEO/main
added USGS and GeoGLOWS modules, updated statistics, updated the visualization, and added a code usage note
2 parents 41d8aca + 15b7017 commit 0be247c

15 files changed

+324
-245
lines changed

README.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ FIMserv/
3838
│ └── fimserve/
3939
│ ├── streamflowdata/ # Handles streamflow data
4040
│ │ ├── nwmretrospectivedata.py # Processes NWM retrospective data
41+
│ │ ├── geoglows.py # Module to retrieve geoglows streamflow data
42+
│ │ ├── usgsdata.py # Retrieve USGS gauge station data
4143
│ │ └── forecasteddata.py # Processes all range forecasted streamflow data
4244
│ ├── plots/ # Visualization functionalities
4345
│ ├── FIMsubset/ # Subsetting functionalities for FIM
@@ -61,6 +63,8 @@ FIMserv/
6163

6264
Although not mandatory,
6365
**we strongly recommend users create a virtual environment and install this package on that virtual environment to avoid the conflict between system dependencies and package dependencies.**
66+
67+
**‼️ If your system does not have Git, install it first. Download Git for Windows or macOS here: https://git-scm.com/downloads**
6468
```bash
6569
#creating a virtual environment using conda
6670
conda create --name fimserve python==3.10
41.4 KB
Binary file not shown.

dist/fimserve-0.1.77.tar.gz

37 KB
Binary file not shown.

docs/code_usage.ipynb

Lines changed: 89 additions & 79 deletions
Large diffs are not rendered by default.

poetry.lock

Lines changed: 96 additions & 91 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "fimserve"
3-
version = "0.1.76"
3+
version = "0.1.77"
44
description = "Framework which is developed with the purpose of quickly generating Flood Inundation Maps (FIM) for emergency response and risk assessment. It is developed under Surface Dynamics Modeling Lab (SDML)."
55
authors = ["Supath Dhital <sdhital@crimson.ua.edu>"]
66
license = "GPL-3.0"

src/fimserve/runFIM.py

Lines changed: 18 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,7 @@
77

88
from .datadownload import setup_directories
99

10-
11-
def runfim(code_dir, output_dir, HUC_code, data_dir):
10+
def runfim(code_dir, output_dir, HUC_code, data_dir, depth=False):
1211
original_dir = os.getcwd()
1312
try:
1413
tools_path = os.path.join(code_dir, "tools")
@@ -18,12 +17,11 @@ def runfim(code_dir, output_dir, HUC_code, data_dir):
1817
load_dotenv(dotenv_path)
1918
sys.path.append(src_path)
2019
sys.path.append(code_dir)
20+
2121
HUC_code = str(HUC_code)
2222
HUC_dir = os.path.join(output_dir, f"flood_{HUC_code}")
23-
2423
csv_path = data_dir
2524

26-
# Get the inundation file path
2725
discharge_basename = os.path.basename(data_dir).split(".")[0]
2826
inundation_dir = os.path.join(HUC_dir, f"{HUC_code}_inundation")
2927
temp_dir = os.path.join(inundation_dir, "temp")
@@ -32,24 +30,21 @@ def runfim(code_dir, output_dir, HUC_code, data_dir):
3230
os.makedirs(temp_dir)
3331

3432
inundation_file = os.path.join(temp_dir, f"{discharge_basename}_inundation.tif")
35-
# depth_file = os.path.join(
36-
# temp_dir, f"{discharge_basename}_depth.tif"
37-
# )
38-
3933
Command = [
4034
sys.executable,
4135
"inundate_mosaic_wrapper.py",
42-
"-y",
43-
HUC_dir,
44-
"-u",
45-
HUC_code,
46-
"-f",
47-
csv_path,
48-
"-i",
49-
inundation_file,
50-
# "-d",
51-
# depth_file,
36+
"-y", HUC_dir,
37+
"-u", HUC_code,
38+
"-f", csv_path,
39+
"-i", inundation_file,
5240
]
41+
42+
if depth:
43+
depth_file = os.path.join(temp_dir, f"{discharge_basename}_depth.tif")
44+
Command += ["-d", depth_file]
45+
else:
46+
depth_file = None
47+
5348
env = os.environ.copy()
5449
env["PYTHONPATH"] = f"{src_path}{os.pathsep}{code_dir}"
5550

@@ -61,34 +56,29 @@ def runfim(code_dir, output_dir, HUC_code, data_dir):
6156
stderr=subprocess.PIPE,
6257
)
6358

64-
# Print the output and error (if any)
6559
print(result.stdout.decode())
6660
if result.stderr:
6761
print(result.stderr.decode())
6862

69-
# Check if the command was successful
7063
if result.returncode == 0:
7164
print(f"Inundation mapping for {HUC_code} completed successfully.")
7265

73-
# Move the generated files to inundation_dir
74-
# if os.path.exists(inundation_file) and os.path.exists(depth_file):
7566
if os.path.exists(inundation_file):
7667
shutil.move(inundation_file, inundation_dir)
77-
# shutil.move(depth_file, inundation_dir)
7868

79-
# Delete the temp directory
69+
if depth and depth_file and os.path.exists(depth_file):
70+
shutil.move(depth_file, inundation_dir)
71+
8072
if os.path.exists(temp_dir):
8173
shutil.rmtree(temp_dir)
82-
8374
else:
8475
print(f"Failed to complete inundation mapping for {HUC_code}.")
8576

8677
finally:
8778
os.chdir(original_dir)
8879

89-
90-
def runOWPHANDFIM(huc):
80+
def runOWPHANDFIM(huc, depth=False):
9181
code_dir, data_dir, output_dir = setup_directories()
9282
discharge = glob.glob(os.path.join(data_dir, f"*{huc}*.csv"))
9383
for file in discharge:
94-
runfim(code_dir, output_dir, huc, file)
84+
runfim(code_dir, output_dir, huc, file, depth=depth)

src/fimserve/statistics/calculatestatistics.py

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -21,23 +21,21 @@ def calculate_metrics(nwm_data, usgs_data):
2121
(usgs_data - np.mean(usgs_data)) ** 2
2222
)
2323
apb = np.sum(np.abs(nwm_data - usgs_data)) / np.sum(usgs_data) * 100
24-
r2 = r2_score(usgs_data, nwm_data)
2524

26-
return {"KGE": kge, "PBias (%)": apb, "R²": r2, "NSE": nse}
25+
return {"KGE": kge, "PBias (%)": apb, "NSE": nse}
2726

2827

2928
# Dual-axis visualization
3029
def visualize_comparison(metrics, output_dir, usgs_site, huc):
31-
metric_names = ["KGE", "R²", "NSE", "PBias (%)"]
30+
metric_names = ["KGE", "NSE", "PBias (%)"]
3231
metric_values = [
3332
metrics["KGE"],
34-
metrics["R²"],
3533
metrics["NSE"],
3634
metrics["PBias (%)"],
3735
]
3836

3937
fig, ax1 = plt.subplots(figsize=(6, 4))
40-
bar_colors = ["tab:blue", "tab:green", "tab:purple", "tab:orange"]
38+
bar_colors = ["tab:blue", "tab:purple", "tab:orange"]
4139

4240
# Bar plot for the first three metrics
4341
ax1.bar(
@@ -54,7 +52,7 @@ def visualize_comparison(metrics, output_dir, usgs_site, huc):
5452
ax1.grid(axis="y", linestyle="--", alpha=1, zorder=0)
5553

5654
# Vertical line between the third and fourth bars
57-
plt.axvline(x=2.5, color="black", linestyle="--", linewidth=1.5)
55+
plt.axvline(x=1.5, color="black", linestyle="--", linewidth=1.5)
5856

5957
# Bar plot for PBias on secondary y-axis
6058
ax2 = ax1.twinx()
@@ -70,22 +68,22 @@ def visualize_comparison(metrics, output_dir, usgs_site, huc):
7068
ax2.tick_params(axis="y", labelcolor="red", labelsize=14)
7169
ax2.set_ylim(0, metric_values[-1] * 1.2)
7270

73-
# Add text on bars
7471
padding = 0.01
7572
for i, val in enumerate(metric_values):
76-
ax = ax1 if i < 3 else ax2
77-
text_color = "red" if i == 3 else "black" # Red text for PBias
73+
ax = ax1 if i < 2 else ax2
74+
text_color = "red" if i == 2 else "black"
75+
# Adjust padding depending on axis
76+
pad = padding if i < 2 else val * 0.05 # 5% of PBias value
7877
ax.text(
7978
i,
80-
val + padding,
79+
val + pad,
8180
f"{val:.2f}",
8281
ha="center",
8382
va="bottom",
8483
fontsize=12,
8584
fontweight="bold",
8685
color=text_color,
8786
)
88-
8987
# Title and labels
9088
ax1.set_xlabel("Statistical Metrics", fontsize=16)
9189
plt.xticks(ticks=range(len(metric_names)), labels=metric_names, fontsize=14)

src/fimserve/streamflowdata/nwmretrospective.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,16 +18,16 @@ def getdischargeforspecifiedtime(
1818
df = pd.read_parquet(file)
1919
all_data = pd.concat([all_data, df], ignore_index=True)
2020

21-
df["value_time"] = pd.to_datetime(df["value_time"])
21+
all_data["value_time"] = pd.to_datetime(all_data["value_time"])
2222

2323
locationID_df = pd.read_csv(location_ids)
2424
location_ids = [f"nwm30-{int(fid)}" for fid in locationID_df["feature_id"]]
2525

2626
specific_date = pd.to_datetime(specific_date)
2727
if date_type == "date":
28-
filtered_df = df[
29-
(df["location_id"].isin(location_ids))
30-
& (df["value_time"].dt.date == specific_date.date())
28+
filtered_df = all_data[
29+
(all_data["location_id"].isin(location_ids))
30+
& (all_data["value_time"].dt.date == specific_date.date())
3131
].copy()
3232
filtered_df["feature_id"] = filtered_df["location_id"].str.replace("nwm30-", "")
3333
discharge_data = (
@@ -38,8 +38,8 @@ def getdischargeforspecifiedtime(
3838
)
3939
formatted_datetime = specific_date.strftime("%Y%m%d")
4040
else:
41-
filtered_df = df[
42-
(df["location_id"].isin(location_ids)) & (df["value_time"] == specific_date)
41+
filtered_df = all_data[
42+
(all_data["location_id"].isin(location_ids)) & (all_data["value_time"] == specific_date)
4343
].copy()
4444
filtered_df.loc[:, "feature_id"] = filtered_df["location_id"].str.replace(
4545
"nwm30-", ""

src/fimserve/streamflowdata/usgsdata.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@ def getusgs_discharge(
2828
output_parquet_dir=output_dir,
2929
overwrite_output=True,
3030
)
31-
print(f"USGS discharge data saved to {output_dir}.")
3231

3332
#If value_times is mentioned and user need the discharge for specific time
3433
def getdischargeforspecifiedtime(
@@ -73,7 +72,9 @@ def getdischargeforspecifiedtime(
7372
.reset_index()
7473
.rename(columns={"value": "discharge"})
7574
)
76-
75+
#Make sure the columns are in int and float
76+
discharge_data = discharge_data.astype({"feature_id": int, "discharge": float})
77+
7778
formatted_datetime = specific_date.strftime("%Y%m%d") if date_type == "date" else specific_date.strftime("%Y%m%d%H%M%S")
7879
finalHANDdischarge_dir = os.path.join(data_dir, f"USGS_{formatted_datetime}_{huc}.csv")
7980
discharge_data.to_csv(finalHANDdischarge_dir, index=False)
@@ -87,6 +88,9 @@ def getUSGSsitedata(huc=None, start_date = None, end_date= None, usgs_sites=None
8788
assign a feature_id corresponding to the usgs_sites and save in the data/inputs as required by the FIMserv.
8889
"""
8990
code_dir, data_dir, output_dir = setup_directories()
91+
HUC_dir = os.path.join(output_dir, f"flood_{huc}")
92+
featureID_dir = os.path.join(HUC_dir, f"feature_IDs.csv")
93+
9094
def process_value_times(huc_key, value_times_list, allow_cleanup=False):
9195
site_data = GetUSGSIDandCorrFID(huc_key)
9296
usgs_ids = site_data["USGS gauge station ID"].tolist()

0 commit comments

Comments
 (0)