
Commit

ruff pass
atmorling committed Dec 30, 2024
1 parent 7d3f0f4 commit 76308fc
Showing 37 changed files with 1,161 additions and 332 deletions.
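This commit is a formatting-only pass: in each hunk below, the line marked "-" is the old over-long form and the lines marked "+" are the wrapped replacement; behavior is unchanged. A minimal sketch of the rewrite pattern, using placeholder names rather than code from this commit:

# Before: one call longer than the formatter's line-length limit.
result = compute_range(trajectory, max_speed_percentage=0.9999, expansion_factor=1.3)

# After: the same call, wrapped inside its parentheses.
result = compute_range(
    trajectory, max_speed_percentage=0.9999, expansion_factor=1.3
)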
16 changes: 12 additions & 4 deletions ecoscope/analysis/UD/etd_range.py
@@ -89,7 +89,9 @@ def calculate_etd_range(
max_speed_percentage: float = 0.9999,
raster_profile: raster.RasterProfile = None,
expansion_factor: float = 1.3,
-    weibull_pdf: typing.Union[Weibull2Parameter, Weibull3Parameter] = Weibull2Parameter(),
+    weibull_pdf: typing.Union[
+        Weibull2Parameter, Weibull3Parameter
+    ] = Weibull2Parameter(),
) -> None:
"""
The ETDRange class provides a trajectory-based, nonparametric approach to estimate the utilization distribution (UD)
@@ -113,7 +115,9 @@ def calculate_etd_range(
"""

# if two-parameter weibull has default values; run an optimization routine to auto-determine parameters
-    if isinstance(weibull_pdf, Weibull2Parameter) and all([weibull_pdf.shape == 1.0, weibull_pdf.scale == 1.0]):
+    if isinstance(weibull_pdf, Weibull2Parameter) and all(
+        [weibull_pdf.shape == 1.0, weibull_pdf.scale == 1.0]
+    ):
speed_kmhr = trajectory_gdf.speed_kmhr
shape, scale = weibull_pdf.fit(speed_kmhr)

@@ -137,7 +141,9 @@ def calculate_etd_range(
y_max += dy

# update the raster extent for the raster profile
-    raster_profile.raster_extent = raster.RasterExtent(x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max)
+    raster_profile.raster_extent = raster.RasterExtent(
+        x_min=x_min, x_max=x_max, y_min=y_min, y_max=y_max
+    )

# determine the output raster size
num_rows, num_columns = (
@@ -168,7 +174,9 @@ def calculate_etd_range(
grid_centroids[0, 0] = x_min + raster_profile.pixel_size * 0.5
grid_centroids[1, 0] = y_max - raster_profile.pixel_size * 0.5

-    centroids_coords = np.dot(grid_centroids, np.mgrid[1:2, :num_columns, :num_rows].T.reshape(-1, 3, 1))
+    centroids_coords = np.dot(
+        grid_centroids, np.mgrid[1:2, :num_columns, :num_rows].T.reshape(-1, 3, 1)
+    )

tr = neighbors.KDTree(centroids_coords.squeeze().T)

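For context on the auto-fit branch above: when the caller leaves the two-parameter Weibull at its defaults (shape == scale == 1.0), both parameters are re-estimated from the trajectory's observed speeds. A minimal sketch of that idea, using scipy's weibull_min as a stand-in for Weibull2Parameter.fit, whose internals this diff does not show:

import numpy as np
from scipy import stats

def maybe_fit_weibull(shape, scale, speed_kmhr):
    # Default parameters signal "auto": re-fit shape and scale to the data.
    if shape == 1.0 and scale == 1.0:
        # weibull_min.fit returns (shape, loc, scale); loc is pinned at 0
        # because speeds are non-negative.
        shape, _, scale = stats.weibull_min.fit(speed_kmhr, floc=0)
    return shape, scale

speeds = np.array([0.4, 1.2, 2.5, 3.1, 0.9])  # hypothetical speeds in km/h
print(maybe_fit_weibull(1.0, 1.0, speeds))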
46 changes: 35 additions & 11 deletions ecoscope/analysis/astronomy.py
@@ -36,33 +36,46 @@ def to_EarthLocation(geometry):
proj_to="+proj=geocent +ellps=WGS84 +datum=WGS84 +units=m +no_defs",
)
return EarthLocation.from_geocentric(
-        *trans.transform(xx=geometry.x, yy=geometry.y, zz=np.zeros(geometry.shape[0])), unit="m"
+        *trans.transform(xx=geometry.x, yy=geometry.y, zz=np.zeros(geometry.shape[0])),
+        unit="m",
)


def is_night(geometry, time):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "Geometry is in a geographic CRS.", UserWarning)
warnings.filterwarnings(
"ignore", "Geometry is in a geographic CRS.", UserWarning
)
return astroplan.Observer(to_EarthLocation(geometry.centroid)).is_night(time)


def sun_time(date, geometry):
midnight = Time(
datetime(date.year, date.month, date.day) + timedelta(seconds=1), scale="utc"
) # add 1 second shift to avoid leap_second_strict warning
-    observer = astroplan.Observer(location=EarthLocation(lon=geometry.centroid.x, lat=geometry.centroid.y))
-    sunrise = observer.sun_rise_time(midnight, which="next", n_grid_points=150).to_datetime(timezone=pytz.UTC)
-    sunset = observer.sun_set_time(midnight, which="next", n_grid_points=150).to_datetime(timezone=pytz.UTC)
+    observer = astroplan.Observer(
+        location=EarthLocation(lon=geometry.centroid.x, lat=geometry.centroid.y)
+    )
+    sunrise = observer.sun_rise_time(
+        midnight, which="next", n_grid_points=150
+    ).to_datetime(timezone=pytz.UTC)
+    sunset = observer.sun_set_time(
+        midnight, which="next", n_grid_points=150
+    ).to_datetime(timezone=pytz.UTC)
return pd.Series({"sunrise": sunrise, "sunset": sunset})


-def calculate_day_night_distance(date, segment_start, segment_end, dist_meters, daily_summary):
+def calculate_day_night_distance(
+    date, segment_start, segment_end, dist_meters, daily_summary
+):
sunrise = daily_summary.loc[date, "sunrise"]
sunset = daily_summary.loc[date, "sunset"]

if segment_start < sunset and segment_end > sunset: # start in day and end in night
day_percent = (sunset - segment_start) / (segment_end - segment_start)
-    elif segment_start < sunrise and segment_end > sunrise:  # start in night and end in day
+    elif (
+        segment_start < sunrise and segment_end > sunrise
+    ):  # start in night and end in day
day_percent = (segment_end - sunrise) / (segment_end - segment_start)
elif sunrise < sunset:
if segment_end < sunrise or segment_start > sunset: # all night
@@ -83,16 +96,27 @@ def get_nightday_ratio(gdf):
gdf["date"] = pd.to_datetime(gdf["segment_start"]).dt.date

daily_summary = gdf.groupby("date").first()["geometry"].reset_index()
daily_summary[["sunrise", "sunset"]] = daily_summary.apply(lambda x: sun_time(x.date, x.geometry), axis=1)
daily_summary[["sunrise", "sunset"]] = daily_summary.apply(
lambda x: sun_time(x.date, x.geometry), axis=1
)
daily_summary["day_distance"] = 0.0
daily_summary["night_distance"] = 0.0
daily_summary = daily_summary.set_index("date")

gdf.apply(
-        lambda x: calculate_day_night_distance(x.date, x.segment_start, x.segment_end, x.dist_meters, daily_summary),
+        lambda x: calculate_day_night_distance(
+            x.date, x.segment_start, x.segment_end, x.dist_meters, daily_summary
+        ),
axis=1,
)

daily_summary["night_day_ratio"] = daily_summary["night_distance"] / daily_summary["day_distance"]
mean_night_day_ratio = daily_summary["night_day_ratio"].replace([np.inf, -np.inf], np.nan).dropna().mean()
daily_summary["night_day_ratio"] = (
daily_summary["night_distance"] / daily_summary["day_distance"]
)
mean_night_day_ratio = (
daily_summary["night_day_ratio"]
.replace([np.inf, -np.inf], np.nan)
.dropna()
.mean()
)
return mean_night_day_ratio
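The wrapped conditionals in calculate_day_night_distance split each movement segment's distance between day and night in proportion to the time spent on each side of the sun event. A worked sketch of the straddling-sunset case, with made-up times:

from datetime import datetime, timezone

sunset = datetime(2024, 1, 1, 18, 0, tzinfo=timezone.utc)
segment_start = datetime(2024, 1, 1, 17, 30, tzinfo=timezone.utc)
segment_end = datetime(2024, 1, 1, 18, 30, tzinfo=timezone.utc)
dist_meters = 1000.0

# Fraction of the segment elapsed before sunset (timedelta / timedelta -> float).
day_percent = (sunset - segment_start) / (segment_end - segment_start)
print(day_percent * dist_meters)        # 500.0 m credited to day_distance
print((1 - day_percent) * dist_meters)  # 500.0 m credited to night_distance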
29 changes: 22 additions & 7 deletions ecoscope/analysis/classifier.py
@@ -80,24 +80,33 @@ def apply_classification(
Returns:
The input dataframe with a classification column appended.
"""
assert input_column_name in dataframe.columns, "input column must exist on dataframe"
assert (
input_column_name in dataframe.columns
), "input column must exist on dataframe"
if not output_column_name:
output_column_name = f"{input_column_name}_classified"

classifier_class = classification_methods.get(scheme)

if not classifier_class:
raise ValueError(f"Invalid classification scheme. Choose from: {list(classification_methods.keys())}")
raise ValueError(
f"Invalid classification scheme. Choose from: {list(classification_methods.keys())}"
)

classifier = classifier_class(dataframe[input_column_name].to_numpy(), **kwargs)
if labels is None:
labels = classifier.bins

if label_ranges:
# We could do this using mapclassify.get_legend_classes, but this generates a cleaner label
ranges = [f"{dataframe[input_column_name].min():.{label_decimals}f} - {labels[0]:.{label_decimals}f}"]
ranges = [
f"{dataframe[input_column_name].min():.{label_decimals}f} - {labels[0]:.{label_decimals}f}"
]
ranges.extend(
[f"{labels[i]:.{label_decimals}f} - {labels[i + 1]:.{label_decimals}f}" for i in range(len(labels) - 1)]
[
f"{labels[i]:.{label_decimals}f} - {labels[i + 1]:.{label_decimals}f}"
for i in range(len(labels) - 1)
]
)
labels = ranges
else:
@@ -124,11 +133,15 @@ def apply_color_map(dataframe, input_column_name, cmap, output_column_name=None)
Returns:
The input dataframe with a color map appended.
"""
-    assert input_column_name in dataframe.columns, "input column must exist on dataframe"
+    assert (
+        input_column_name in dataframe.columns
+    ), "input column must exist on dataframe"

if isinstance(cmap, list):
nunique = dataframe[input_column_name].nunique()
assert len(cmap) >= nunique, f"cmap list must contain at least as many values as unique in {input_column_name}"
assert (
len(cmap) >= nunique
), f"cmap list must contain at least as many values as unique in {input_column_name}"
cmap = [hex_to_rgba(x) for x in cmap]
cmap = pd.Series(cmap[:nunique], index=dataframe[input_column_name].unique())
if isinstance(cmap, str):
@@ -157,5 +170,7 @@ def apply_color_map(dataframe, input_column_name, cmap, output_column_name=None)
if not output_column_name:
output_column_name = f"{input_column_name}_colormap"

-    dataframe[output_column_name] = [cmap[classification] for classification in dataframe[input_column_name]]
+    dataframe[output_column_name] = [
+        cmap[classification] for classification in dataframe[input_column_name]
+    ]
return dataframe
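The label_ranges branch above renders each classification bin as a "low - high" string. A self-contained sketch with hypothetical bin edges, where label_decimals=1 mirrors the f-string precision used in the diff:

column_min = 0.0          # stand-in for dataframe[input_column_name].min()
labels = [2.5, 5.0, 7.5]  # stand-in for classifier.bins
label_decimals = 1

ranges = [f"{column_min:.{label_decimals}f} - {labels[0]:.{label_decimals}f}"]
ranges.extend(
    f"{labels[i]:.{label_decimals}f} - {labels[i + 1]:.{label_decimals}f}"
    for i in range(len(labels) - 1)
)
print(ranges)  # ['0.0 - 2.5', '2.5 - 5.0', '5.0 - 7.5']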
77 changes: 57 additions & 20 deletions ecoscope/analysis/ecograph.py
@@ -40,7 +40,9 @@ class Ecograph:
The number of steps used to compute the two tortuosity metrics (Default : 3 steps)
"""

-    def __init__(self, trajectory, resolution=15, radius=2, cutoff=None, tortuosity_length=3):
+    def __init__(
+        self, trajectory, resolution=15, radius=2, cutoff=None, tortuosity_length=3
+    ):
self.graphs = {}
self.trajectory = trajectory
self.resolution = ceil(resolution)
@@ -62,8 +64,12 @@ def __init__(self, trajectory, resolution=15, radius=2, cutoff=None, tortuosity_
]
geom = self.trajectory["geometry"]

-        eastings = np.array([geom.iloc[i].coords.xy[0] for i in range(len(geom))]).flatten()
-        northings = np.array([geom.iloc[i].coords.xy[1] for i in range(len(geom))]).flatten()
+        eastings = np.array(
+            [geom.iloc[i].coords.xy[0] for i in range(len(geom))]
+        ).flatten()
+        northings = np.array(
+            [geom.iloc[i].coords.xy[1] for i in range(len(geom))]
+        ).flatten()

self.xmin = floor(np.min(eastings)) - self.resolution
self.ymin = floor(np.min(northings)) - self.resolution
@@ -73,7 +79,9 @@ def __init__(self, trajectory, resolution=15, radius=2, cutoff=None, tortuosity_
self.xmax += self.resolution - ((self.xmax - self.xmin) % self.resolution)
self.ymax += self.resolution - ((self.ymax - self.ymin) % self.resolution)

-        self.transform = Affine(self.resolution, 0.00, self.xmin, 0.00, -self.resolution, self.ymax)
+        self.transform = Affine(
+            self.resolution, 0.00, self.xmin, 0.00, -self.resolution, self.ymax
+        )
self.inverse_transform = ~self.transform

self.n_rows = int((self.xmax - self.xmin) // self.resolution)
@@ -107,7 +115,9 @@ def to_csv(self, output_path):
df[feature].append(G.nodes[node][feature])
(pd.DataFrame.from_dict(df)).to_csv(output_path, index=False)

-    def to_geotiff(self, feature, output_path, individual="all", interpolation=None, transform=None):
+    def to_geotiff(
+        self, feature, output_path, individual="all", interpolation=None, transform=None
+    ):
"""
Saves a specific node feature as a GeoTIFF
@@ -131,17 +141,19 @@ def to_geotiff(self, feature, output_path, individual="all", interpolation=None,
if individual == "all":
feature_ndarray = self._get_feature_mosaic(feature, interpolation)
elif individual in self.graphs.keys():
-                feature_ndarray = self._get_feature_map(feature, individual, interpolation)
+                feature_ndarray = self._get_feature_map(
+                    feature, individual, interpolation
+                )
else:
raise ValueError("This individual is not in the dataset")
else:
raise ValueError("This feature was not computed by EcoGraph")

if isinstance(transform, sklearn.base.TransformerMixin):
nan_mask = ~np.isnan(feature_ndarray)
-            feature_ndarray[nan_mask] = transform.fit_transform(feature_ndarray[nan_mask].reshape(-1, 1)).reshape(
-                feature_ndarray[nan_mask].shape
-            )
+            feature_ndarray[nan_mask] = transform.fit_transform(
+                feature_ndarray[nan_mask].reshape(-1, 1)
+            ).reshape(feature_ndarray[nan_mask].shape)

raster_profile = ecoscope.io.raster.RasterProfile(
pixel_size=self.resolution,
@@ -158,7 +170,9 @@ def to_geotiff(self, feature, output_path, individual="all", interpolation=None,
**raster_profile,
)

-    def _get_ecograph(self, trajectory_gdf, individual_name, radius, cutoff, tortuosity_length):
+    def _get_ecograph(
+        self, trajectory_gdf, individual_name, radius, cutoff, tortuosity_length
+    ):
G = nx.Graph()
geom = trajectory_gdf["geometry"]
for i in range(len(geom) - (tortuosity_length - 1)):
@@ -175,7 +189,12 @@ def _get_ecograph(self, trajectory_gdf, individual_name, radius, cutoff, tortuos

t = step_attributes["segment_start"]
seconds_in_day = 24 * 60 * 60
-            seconds_past_midnight = (t.hour * 3600) + (t.minute * 60) + t.second + (t.microsecond / 1000000.0)
+            seconds_past_midnight = (
+                (t.hour * 3600)
+                + (t.minute * 60)
+                + t.second
+                + (t.microsecond / 1000000.0)
+            )
time_diff = pd.to_datetime(
trajectory_gdf.iloc[i + (tortuosity_length - 1)]["segment_end"]
) - pd.to_datetime(t)
@@ -252,19 +271,27 @@ def _get_dot_product(x, y, z, w):

def _get_tortuosities(self, lines, time_delta):
point1, point2 = lines[0][0], lines[len(lines) - 1][1]
-        beeline_dist = np.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2)
+        beeline_dist = np.sqrt(
+            (point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2
+        )
total_length = 0
for i in range(len(lines) - 1):
point1, point2, point3 = lines[i][0], lines[i][1], lines[i + 1][0]
-            if (floor(point2[0]) == floor(point3[0])) and (floor(point2[1]) == floor(point3[1])):
-                total_length += ((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2) ** 0.5
+            if (floor(point2[0]) == floor(point3[0])) and (
+                floor(point2[1]) == floor(point3[1])
+            ):
+                total_length += (
+                    (point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2
+                ) ** 0.5
else:
if beeline_dist != 0:
return np.nan, np.log(time_delta / (beeline_dist**2))
else:
return np.nan, np.nan
point1, point2 = lines[len(lines) - 1][0], lines[len(lines) - 1][1]
-        total_length += ((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2) ** 0.5
+        total_length += (
+            (point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2
+        ) ** 0.5

if (total_length != 0) and (beeline_dist != 0):
return (beeline_dist / total_length), np.log(time_delta / (beeline_dist**2))
@@ -287,7 +314,9 @@ def _compute_degree(G):

def _compute_collective_influence(self, G, radius):
for node in G.nodes():
G.nodes[node]["collective_influence"] = self._get_collective_influence(G, node, radius)
G.nodes[node]["collective_influence"] = self._get_collective_influence(
G, node, radius
)

@staticmethod
def _get_collective_influence(G, start, radius):
@@ -326,19 +355,25 @@ def _get_feature_mosaic(self, feature, interpolation=None):

def _get_feature_map(self, feature, individual, interpolation):
if interpolation is not None:
-            return self._get_interpolated_feature_map(feature, individual, interpolation)
+            return self._get_interpolated_feature_map(
+                feature, individual, interpolation
+            )
else:
return self._get_regular_feature_map(feature, individual)

def _get_regular_feature_map(self, feature, individual):
feature_ndarray = np.full((self.n_cols, self.n_rows), np.nan)
for node in self.graphs[individual].nodes():
-            feature_ndarray[node[1]][node[0]] = (self.graphs[individual]).nodes[node][feature]
+            feature_ndarray[node[1]][node[0]] = (self.graphs[individual]).nodes[node][
+                feature
+            ]
return feature_ndarray

def _get_interpolated_feature_map(self, feature, individual, interpolation):
feature_ndarray = self._get_regular_feature_map(feature, individual)
-        individual_trajectory = self.trajectory[self.trajectory["groupby_col"] == individual]
+        individual_trajectory = self.trajectory[
+            self.trajectory["groupby_col"] == individual
+        ]
geom = individual_trajectory["geometry"]
idxs_dict = {}
for i in range(len(geom)):
@@ -365,7 +400,9 @@ def _get_interpolated_feature_map(self, feature, individual, interpolation):
elif interpolation == "min":
feature_ndarray[key[0], key[1]] = np.min(value)
else:
raise NotImplementedError("Interpolation type not supported by EcoGraph")
raise NotImplementedError(
"Interpolation type not supported by EcoGraph"
)
return feature_ndarray


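The tortuosity hunks above compute a straightness index: the straight-line (beeline) distance between a path's endpoints divided by the summed length of its steps, so 1.0 means perfectly straight. A minimal sketch with made-up coordinates:

import numpy as np

points = [(0.0, 0.0), (1.0, 1.0), (2.0, 1.0), (3.0, 2.0)]

beeline = np.hypot(points[-1][0] - points[0][0], points[-1][1] - points[0][1])
total = sum(
    np.hypot(x2 - x1, y2 - y1) for (x1, y1), (x2, y2) in zip(points, points[1:])
)
print(beeline / total)  # ~0.94: close to straight, but not quite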
4 changes: 3 additions & 1 deletion ecoscope/analysis/feature_density.py
@@ -15,5 +15,7 @@ def clip_density(cell):
raise ValueError("Unsupported geometry type")

grid["density"] = grid.geometry.apply(clip_density)
grid["density"] = grid["density"].replace(0, np.nan) # Set 0's to nan so they don't draw on map
grid["density"] = grid["density"].replace(
0, np.nan
) # Set 0's to nan so they don't draw on map
return grid
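The zero-masking line above keeps empty cells from being rendered: a density of 0 becomes NaN, which map layers skip. A minimal pandas sketch of the same replace:

import numpy as np
import pandas as pd

density = pd.Series([0, 3, 0, 7])
print(density.replace(0, np.nan).tolist())  # [nan, 3.0, nan, 7.0]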