I am developing a Kotlin Multiplatform app. In the shared module, I want to use the numpy package to work with some .pkl files.
I am following the setup described in the official Kotlin setup link.
This is what I have done so far:
Project settings.gradle.kts

[code]
rootProject.name = "Assignment"
enableFeaturePreview("TYPESAFE_PROJECT_ACCESSORS")

include(":composeApp")
include(":shared")
[/code]

Project -> build.gradle.kts

[code]
plugins {
    // this is necessary to avoid the plugins being loaded multiple times
    // in each subproject's classloader
    alias(libs.plugins.androidApplication) apply false
    alias(libs.plugins.androidLibrary) apply false
    alias(libs.plugins.composeMultiplatform) apply false
    alias(libs.plugins.composeCompiler) apply false
    alias(libs.plugins.kotlinMultiplatform) apply false
    id("com.chaquo.python") version "16.0.0" apply false
}
[/code]

And the shared -> build.gradle.kts

[code]
import org.jetbrains.kotlin.gradle.ExperimentalKotlinGradlePluginApi
import org.jetbrains.kotlin.gradle.dsl.JvmTarget

android {
    namespace = "com.assignment.shared"
    compileSdk = libs.versions.android.compileSdk.get().toInt()
    compileOptions {
        sourceCompatibility = JavaVersion.VERSION_11
        targetCompatibility = JavaVersion.VERSION_11
    }
    defaultConfig {
        minSdk = libs.versions.android.minSdk.get().toInt()
        ndk {
            // On Apple silicon, you can omit x86_64.
            abiFilters += listOf("arm64-v8a")
        }
    }
}

chaquopy {
    defaultConfig {
        buildPython("/usr/local/bin/python3")
    }
    productFlavors { }
    sourceSets { }
}
[/code]
These are my configuration files. The first warning I get is:

[code]
Warning: Failed to compile to .pyc format: [/usr/local/bin/python3] does not appear to be a valid Python command. See https://chaquo.com/chaquopy/doc/current/android.html#android-bytecode
[/code]
I tried running python3 from the terminal and it works fine.
When I try to import Python into the shared module's class, it is not imported.
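To show what I mean by importing Python into the shared module, this is roughly what I am trying to do from the shared module's androidMain source set, following the Chaquopy API docs (the helper function below is a simplified placeholder; this is exactly the part that does not work for me yet):

[code]
import android.content.Context
import com.chaquo.python.Python
import com.chaquo.python.android.AndroidPlatform

// Hypothetical helper in androidMain: unpickle a file and return its length via numpy.
fun readPickleLength(context: Context, path: String): Int {
    if (!Python.isStarted()) {
        Python.start(AndroidPlatform(context))  // start the embedded interpreter once
    }
    val py = Python.getInstance()
    val builtins = py.getBuiltins()
    val pickle = py.getModule("pickle")
    val np = py.getModule("numpy")              // this is the import that fails for me right now
    val raw = builtins.callAttr("open", path, "rb").callAttr("read")
    val data = pickle.callAttr("loads", raw)
    return np.callAttr("asarray", data).callAttr("__len__").toInt()
}
[/code]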
How can I fix this and access the numpy package in my KMM application?
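From the Chaquopy documentation, I believe the chaquopy block in shared/build.gradle.kts will eventually need to look roughly like the sketch below; the interpreter path is just an example from my machine, and I have not verified that this builds or resolves the warning:

[code]
chaquopy {
    defaultConfig {
        // Point buildPython at a concrete interpreter executable
        // (example path only, not a verified fix).
        buildPython("/opt/homebrew/bin/python3.11")
        // Request numpy for the Python runtime bundled into the app.
        pip {
            install("numpy")
        }
    }
}
[/code]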
EDIT: I would also appreciate suggestions for a different library or workaround to perform an operation on a .pkl file in a KMM project (iOS & Android),
or help converting the Python script below to Kotlin with the Multik library (a rough plain-Kotlin sketch of one small part is included after the script).
[code]
# -*- coding: utf-8 -*-
import numpy as np
import pickle
def seg_stride(sig, dz, f_start, est_t):
    """! Estimate stride start and end for walking segment
    @param sig Moving standard deviation of heel
    @param dz Depth difference between heels
    @param f_start Start frame of walking segment
    @param est_t Estimated duration of one stride
    @return fp_lr Estimated frames for stride [start, end]
    """
    cur_len = len(sig)
    m_thres = np.mean(sig[sig < np.median(sig)])  # get the mean for values less than median
    [idx] = np.where(np.diff((sig < m_thres) * 1) > 0)  # the search start point for each stride, where the heel z goes below threshold
    # Filter idx such that there is only one in each stride
    # upward zero-crossings to nearest time step
    [upcross] = np.where((dz[:-1] <= 0) & (dz[1:] > 0))
    # downward zero-crossings
    [downcross] = np.where((dz[:-1] >= 0) & (dz[1:] < 0))
    if len(upcross) >= len(downcross):
        cross = upcross
    else:
        cross = downcross
    cross = np.concatenate([[0], cross, [cur_len]])
    filtered_idx = []
    # Iterate through the intervals between crossings
    for i in range(len(cross) - 1):
        start = cross[i]
        end = cross[i + 1]
        # Find elements in idx that are in the interval [start, end)
        elements_in_interval = idx[(idx >= start) & (idx < end)]
        # If there is at least one element, keep the first (smallest) one
        if len(elements_in_interval) >= 1:
            filtered_idx.append(elements_in_interval[0])
    # Convert filtered_idx back to a numpy array
    filtered_idx = np.array(filtered_idx)
    num_fp = len(filtered_idx)  # no. of strides
    if num_fp > 1:
        fp = np.zeros((num_fp))
        # refine and record the corresponding frame
        for k in range(num_fp):
            cur_t = filtered_idx[k]
            cur_sig = sig[cur_t : min(cur_len, cur_t + round(0.25 * est_t))]  # search space for one stride: start point to 1/4 of estimated stride or end of vid
            # locate the local minimum
            s_idx = np.argmin(cur_sig)
            t2 = min(cur_len, cur_t + s_idx)
            t1 = max(0, cur_t - round(0.2 * est_t))  # what is this for?
            t_idx = np.argmax(dz[t1 : t2 + 1])  # checking step to see if there is an earlier peak?
            fp[k] = f_start + t1 + t_idx
        # set fp_lr: each row is [start frame, end frame] of a valid stride
        fp_lr = np.zeros((num_fp - 1, 2), dtype=int)
        for k in range(num_fp - 1):
            fp_lr[k, 0] = int(fp[k])
            fp_lr[k, 1] = int(fp[k + 1])
    else:
        fp_lr = np.empty((0, 2), dtype=int)
    return fp_lr
def dist2plane_new(points, ground_plane):
    """! Project 3D points to plane and compute shortest distance
    @param points Input 3D points
    @param ground_plane ground plane parameters (a,b,c,d)
    @return projected_points Projected points on ground plane
    @return distance Shortest distance between points and plane
    """
    a, b, c, d = ground_plane
    projected_points = []
    distance = []
    for point in points:
        s, u, v = point
        t = (d - a * s - b * u - c * v) / (a * a + b * b + c * c)
        # point closest to plane
        x = s + a * t
        y = u + b * t
        z = v + c * t
        dist = abs(a * s + b * u + c * z + d) / (np.sqrt(a * a + b * b + c * c))
        projected_points.append([x, y, z])
        distance.append(dist)
    return np.array(projected_points), np.array(distance)
def refine_stride_boundary(angle, foot_on, cur_t, c_margin, nb_frames):
    # set local segment
    t1 = max(0, cur_t - c_margin)
    t2 = min(nb_frames - 1, cur_t + c_margin)
    # refine the start of stride by finding the largest decrease in heel angle
    idx = local_argmin_diff(angle, t1, t2)
    t = t1 + idx
    # further refine by ensuring foot is on ground at t
    if t not in foot_on:
        # if foot not on ground, adjust to next frame when foot is on ground
        next_foot_on = foot_on[(foot_on - t) > 0]
        if len(next_foot_on) > 0:
            t = next_foot_on[0]
    return t
def find_boundaries(fp, angle, angle_other, foot_on, est_t, nb_frames):
    c_margin = round(est_t / 10)
    t = []
    # refine stride boundaries (foot on)
    for k in range(len(fp)):
        # refine start and end of stride
        t_start = refine_stride_boundary(angle, foot_on, fp[k, 0], c_margin, nb_frames)
        t_end = refine_stride_boundary(angle, foot_on, fp[k, 1], c_margin, nb_frames)
        # find foot offs
        if t_end >= t_start + 4:  # if next foot on is at least 4 frames ahead
            # find foot off - min angle between start and end of stride
            t_fo = t_start + local_argmin(angle, t_start, t_end)
            # if current foot off is at least 2 frames ahead of start
            if t_fo >= t_start + 2:
                # find other foot off
                to_fo = t_start + local_argmin(angle_other, t_start, t_fo)
                t.append([t_start, to_fo, t_fo, t_end])
    return t
def combine_left_right(fp1, fp2):
    # foot ons
    t_strides = [t[0] for t in fp2] + [t[-1] for t in fp2]
    t_strides = sorted(set(t_strides))
    t_strides = np.array(t_strides)
    refined_fp = []
    for fp in fp1:
        other_foot_on = t_strides[(t_strides > fp[1]) & (t_strides < ...)]
        ...

# NOTE: the rest of combine_left_right and the start of the main segmentation
# function got cut off when I pasted the script; the code below resumes inside
# that main function, after dz and upcross have been computed.

    [downcross] = np.where((dz[:-1] >= 0) & (dz[1:] < 0))
    # estimate the duration of one stride
    diff_up = np.diff(upcross).tolist()
    diff_down = np.diff(downcross).tolist()
    est_t = np.median(diff_up + diff_down)
    print('The estimated duration of one stride is %d frames' % est_t)
    # use the 2D-based phase segmentation results to set walking segments (change on 02/07/2023)
    # take input list and run in loop for walk-turn-walk (22/01/2024)
    fp_left = []
    fp_right = []
    print("number of complete round of walk-forward, walk-backward: ", len(fp_walk_list))
    for fp_walk in fp_walk_list:
        f_start1 = fp_walk[0]
        f_end1 = fp_walk[1]
        f_start2 = fp_walk[2]
        f_end2 = fp_walk[3]
        # for 1st walking segment (walk towards camera)
        # for left foot
        sig1 = left_heel[f_start1 : f_end1 + 1, 2]  # relative-depth (05/Oct/2023)
        dz1 = dz[f_start1 : f_end1 + 1]
        fp1_left = seg_stride(sig1, dz1, f_start1, est_t)
        # for right foot
        sig2 = right_heel[f_start1 : f_end1 + 1, 2]  # relative-depth (05/Oct/2023)
        dz2 = -dz1
        fp1_right = seg_stride(sig2, dz2, f_start1, est_t)
        # for 2nd walking segment (walk away from camera)
        # for left foot
        sig1 = -left_heel[f_start2 : f_end2 + 1, 2]  # add negative
        dz1 = -dz[f_start2 : f_end2 + 1]
        fp2_left = seg_stride(sig1, dz1, f_start2, est_t)
        # for right foot
        sig2 = -right_heel[f_start2 : f_end2 + 1, 2]  # relative-depth (05/Oct/2023)
        dz2 = -dz1
        fp2_right = seg_stride(sig2, dz2, f_start2, est_t)
        fp_left.append(fp1_left)
        fp_left.append(fp2_left)
        fp_right.append(fp1_right)
        fp_right.append(fp2_right)
    #print('Finished for loop')
    fp_left = np.vstack(fp_left)
    fp_right = np.vstack(fp_right)
    # Further processing to identify single support, double support, and refine stride boundaries
    #print('Starting Step3')
    # Step 3: Calculate distance to plane and angles between foot and floor
    # angle between left foot and floor plane
    [pp1, dz1] = dist2plane_new(left_heel, ground_plane)
    [pp2, dz2] = dist2plane_new(left_toe, ground_plane)
    perpendicular = dz2 - dz1  # height difference between left heel and toe relative to ground plane
    base = np.sqrt(np.sum(np.power(pp2 - pp1, 2), 1))  # distance between left heel and left toe
    left_angle = np.arctan2(perpendicular, base)
    left_angle = left_angle * 180 / np.pi  # convert to degree
    # angle between right foot and floor plane
    [pp1, dz1] = dist2plane_new(right_heel, ground_plane)
    [pp2, dz2] = dist2plane_new(right_toe, ground_plane)
    perpendicular = dz2 - dz1
    base = np.sqrt(np.sum(np.power(pp2 - pp1, 2), 1))
    right_angle = np.arctan2(perpendicular, base)
    right_angle = right_angle * 180 / np.pi  # convert to degree
    # distance between ankles and floor
    [ppl, dzl] = dist2plane_new(left_heel, ground_plane)
    [ppr, dzr] = dist2plane_new(right_heel, ground_plane)
    # compute when each foot is on ground
    left_foot_on = np.where(dzl < dzr)[0]
    right_foot_on = np.where(dzr < dzl)[0]
    #print('Starting Step4')
    # Step 4: refine boundaries of each stride and further divide into
    # double support (DS1), single support, and double support (DS2)
    # refine stride boundaries (foot on) and find foot offs
    t_left = find_boundaries(fp_left, left_angle, right_angle, left_foot_on, est_t, nb_frames)
    t_right = find_boundaries(fp_right, right_angle, left_angle, right_foot_on, est_t, nb_frames)
    # combine left and right times
    t_left = combine_left_right(t_left, t_right)
    t_right = combine_left_right(t_right, t_left)
    return t_left, t_right  # , stride_left, stride_right, output
def local_argmin(sig, start, end):
    """! Find local minimum index
    @param sig Signal
    @return Local minimum index
    """
    if end
[/code]
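To make the Multik part of the question more concrete, here is a rough, untested plain-Kotlin sketch of just the thresholding and zero-crossing logic at the top of seg_stride (plain DoubleArray instead of numpy or Multik arrays, with a simplified median); pointers to the idiomatic Multik equivalents would be very welcome:

[code]
// Untested sketch: candidate stride start indices and upward zero-crossings,
// mirroring the first few lines of seg_stride.
fun strideSearchStarts(sig: DoubleArray, dz: DoubleArray): Pair<List<Int>, List<Int>> {
    // mean of the values below the (simplified) median, like np.mean(sig[sig < np.median(sig)])
    val median = sig.sorted()[sig.size / 2]
    val mThres = sig.filter { it < median }.average()

    // frames where sig drops below the threshold: np.diff((sig < m_thres) * 1) > 0
    val idx = (0 until sig.size - 1).filter { sig[it] >= mThres && sig[it + 1] < mThres }

    // upward zero-crossings of dz: (dz[:-1] <= 0) & (dz[1:] > 0)
    val upcross = (0 until dz.size - 1).filter { dz[it] <= 0.0 && dz[it + 1] > 0.0 }

    return Pair(idx, upcross)
}
[/code]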