Skip to content

Instantly share code, notes, and snippets.

View yoi-hibino's full-sized avatar

Yoi Hibino yoi-hibino

View GitHub Profile
#include <trajectory_msgs/msg/joint_trajectory.h>
rcl_subscription_t subscriber;
trajectory_msgs__msg__JointTrajectory traj_msg;
std::vector<TrajectoryPoint> points;
void traj_callback(const void *msgin) {
const trajectory_msgs__msg__JointTrajectory *msg = (const trajectory_msgs__msg__JointTrajectory *)msgin;
// TrajectoryPointsの受信処理
# spray_path_planner.py
import rclpy
from rclpy.node import Node
import pyrealsense2 as rs
import open3d as o3d
import numpy as np
import tf_transformations
from moveit_commander import RobotCommander, MoveGroupCommander
from geometry_msgs.msg import PoseStamped
@yoi-hibino
yoi-hibino / csm_openai.py
Created March 16, 2025 04:42
Sesame+OpenAI API
from huggingface_hub import hf_hub_download
from huggingface_hub import login
from generator import load_csm_1b
import torchaudio
import sounddevice as sd
import numpy as np
from openai import OpenAI
import os
# NOTE(review): hard-coded credential placeholder — supply the token from an
# environment variable (e.g. os.environ["HF_TOKEN"]) instead of committing a
# real access token to source control.
login("__your_huggingface_access_token_here__")
@yoi-hibino
yoi-hibino / realtime_lidar_locator.py
Created March 16, 2025 02:57
Real-time LiDAR Localization in CAD Environment
import numpy as np
import open3d as o3d
import transforms3d as t3d
import trimesh
import scipy.spatial
from sklearn.neighbors import KDTree
import time
import threading
import queue
import concurrent.futures
@yoi-hibino
yoi-hibino / lidar_locator.py
Last active March 16, 2025 02:37
LiDAR Localization in 3D CAD Environment
import numpy as np
import open3d as o3d
import matplotlib.pyplot as plt
import transforms3d as t3d
import trimesh
import scipy.spatial
class SensorLocalizer:
def __init__(self, cad_model_path, scale_factor=1.0):
"""
@yoi-hibino
yoi-hibino / csm.py
Created March 14, 2025 05:55
CSM Sample
from huggingface_hub import hf_hub_download
from generator import load_csm_1b
import torchaudio
# Authentication is needed to access the model.
# You also need to request access to https://huggingface.co/meta-llama/Llama-3.2-1B
from huggingface_hub import login

# NOTE(review): placeholder token — load it from an environment variable
# rather than committing a credential to source control.
login("your_huggingface_token")

# Download the CSM-1B checkpoint from the Hugging Face Hub (cached locally);
# returns the local filesystem path to the file.
model_path = hf_hub_download(repo_id="sesame/csm-1b", filename="ckpt.pt")
@yoi-hibino
yoi-hibino / multiple_sensor.cpp
Created March 9, 2025 07:03
Multiple IMU fusion
#include <Wire.h>
#include <Arduino.h>
// Define number of IMUs and EKF parameters
#define NUM_IMUS 3 // Number of IMU sensors
#define EKF_N 10 // State vector dimension: [position (3), velocity (3), quaternion (4)]
#define EKF_M 9 // Measurement vector dimension: [accelerometer (3), gyroscope (3), magnetometer (3)]
// Include TinyEKF and set dimensions
#define EKF_N 10
@yoi-hibino
yoi-hibino / optical_flow.py
Created March 3, 2025 17:35
Shock Wave Imaging - Optical Flow Analysis
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Open the input video and read the first frame to seed the optical-flow
# comparison; later frames are diffed against this one.
cap = cv2.VideoCapture('video.mp4')
ret, prev_frame = cap.read()
if not ret:
    # VideoCapture.read returns (False, None) when the file is missing/unreadable.
    raise ValueError("Could not read video.")
# Optical flow operates on single-channel intensity images.
prev_gray = cv2.cvtColor(prev_frame, cv2.COLOR_BGR2GRAY)
import numpy as np
from filterpy.kalman import ExtendedKalmanFilter
# Assume our state vector x includes only position for simplicity.
# In practice, it would include orientation, velocity, etc.
# Let's say x[0:3] is the "true" position of the reference point.

# Known sensor offset relative to the reference point, expressed in the
# reference point's frame.
sensor_offset = np.array([0.1, -0.05, 0.0])  # in meters
import matplotlib.pyplot as plt
import numpy as np
import cv2
NACA_0018 =[[1.0000, 0.00000],
[0.9500, 0.01210],
[0.9000, 0.02172],
[0.8000, 0.03935],
[0.7000, 0.05496],