Giter VIP home page Giter VIP logo

openni-python's Introduction

OpenNI2/NiTE2 Python Bindings

Python bindings for OpenNI2 and NiTE2.

Based on Primesense original bindings, with the bugs (well, at least some of them) fixed.

Example of NiTE2 usage:

import sys
from openni import openni2, nite2, utils

# Both runtimes must be initialized before a device can be opened.
openni2.initialize()
nite2.initialize()

dev = openni2.Device.open_any()

try:
    userTracker = nite2.UserTracker(dev)
except utils.NiteError:
    # Fix: the original example called an undefined `logger`, which raised a
    # NameError instead of printing the diagnostic. Report on stderr instead
    # so the snippet runs as-is.
    print("Unable to start the NiTE human tracker. Check "
          "the error messages in the console. Model data "
          "(s.dat, h.dat...) might be inaccessible.", file=sys.stderr)
    sys.exit(-1)

while True:

    frame = userTracker.read_frame()

    if frame.users:
        for user in frame.users:
            if user.is_new():
                print("New human detected! Calibrating...")
                userTracker.start_skeleton_tracking(user.id)
            elif user.skeleton.state == nite2.SkeletonState.NITE_SKELETON_TRACKED:
                head = user.skeleton.joints[nite2.JointType.NITE_JOINT_HEAD]

                confidence = head.positionConfidence
                print("Head: (x:%dmm, y:%dmm, z:%dmm), confidence: %.2f" % (
                                                                    head.position.x,
                                                                    head.position.y,
                                                                    head.position.z,
                                                                    confidence))

# NOTE(review): this is only reached if the loop above is interrupted; kept
# to show the required teardown order (NiTE before OpenNI).
nite2.unload()
openni2.unload()

Another example of NiTE2. Display skeletons over the depth stream.

import sys
import argparse
from openni import openni2, nite2, utils
import numpy as np
import cv2

GRAY_COLOR = (64, 64, 64)
CAPTURE_SIZE_KINECT = (512, 424)
CAPTURE_SIZE_OTHERS = (640, 480)


def parse_arg(argv=None):
    """Parse command-line options for the demo.

    Args:
        argv: Optional list of argument strings. Defaults to None, in which
            case argparse reads sys.argv[1:] — so the original no-argument
            call sites keep working unchanged.

    Returns:
        argparse.Namespace with a ``window_width`` attribute
        (int, default 1024).
    """
    parser = argparse.ArgumentParser(description='Test OpenNI2 and NiTE2.')
    parser.add_argument('-w', '--window_width', type=int, default=1024,
                        help='Specify the window width.')
    return parser.parse_args(argv)


def draw_limb(img, ut, j1, j2, col):
    """Draw one skeleton limb (segment j1-j2) onto ``img`` in depth coordinates.

    The limb is skipped unless both joints have a position confidence above
    0.4; a joint with confidence below 1.0 is drawn in gray instead of `col`.
    """
    p1 = ut.convert_joint_coordinates_to_depth(j1.position.x, j1.position.y, j1.position.z)
    p2 = ut.convert_joint_coordinates_to_depth(j2.position.x, j2.position.y, j2.position.z)

    # Guard clause: require both endpoints to clear the confidence threshold.
    if j1.positionConfidence <= 0.4 or j2.positionConfidence <= 0.4:
        return

    pt1 = (int(p1[0]), int(p1[1]))
    pt2 = (int(p2[0]), int(p2[1]))

    both_confident = j1.positionConfidence >= 1.0 and j2.positionConfidence >= 1.0
    cv2.line(img, pt1, pt2, col if both_confident else GRAY_COLOR, 1)

    cv2.circle(img, pt1, 2, col if j1.positionConfidence >= 1.0 else GRAY_COLOR, -1)
    cv2.circle(img, pt2, 2, col if j2.positionConfidence >= 1.0 else GRAY_COLOR, -1)


def draw_skeleton(img, ut, user, col):
    """Draw the full NiTE skeleton of ``user`` onto ``img`` in color ``col``."""
    jt = nite2.JointType
    # Pairs of joints that form the limbs of the rendered stick figure.
    limbs = (
        (jt.NITE_JOINT_HEAD, jt.NITE_JOINT_NECK),
        # upper body
        (jt.NITE_JOINT_NECK, jt.NITE_JOINT_LEFT_SHOULDER),
        (jt.NITE_JOINT_LEFT_SHOULDER, jt.NITE_JOINT_TORSO),
        (jt.NITE_JOINT_TORSO, jt.NITE_JOINT_RIGHT_SHOULDER),
        (jt.NITE_JOINT_RIGHT_SHOULDER, jt.NITE_JOINT_NECK),
        # left hand
        (jt.NITE_JOINT_LEFT_HAND, jt.NITE_JOINT_LEFT_ELBOW),
        (jt.NITE_JOINT_LEFT_ELBOW, jt.NITE_JOINT_LEFT_SHOULDER),
        # right hand
        (jt.NITE_JOINT_RIGHT_HAND, jt.NITE_JOINT_RIGHT_ELBOW),
        (jt.NITE_JOINT_RIGHT_ELBOW, jt.NITE_JOINT_RIGHT_SHOULDER),
        # lower body
        (jt.NITE_JOINT_TORSO, jt.NITE_JOINT_LEFT_HIP),
        (jt.NITE_JOINT_LEFT_HIP, jt.NITE_JOINT_RIGHT_HIP),
        (jt.NITE_JOINT_RIGHT_HIP, jt.NITE_JOINT_TORSO),
        # left leg
        (jt.NITE_JOINT_LEFT_FOOT, jt.NITE_JOINT_LEFT_KNEE),
        (jt.NITE_JOINT_LEFT_KNEE, jt.NITE_JOINT_LEFT_HIP),
        # right leg
        (jt.NITE_JOINT_RIGHT_FOOT, jt.NITE_JOINT_RIGHT_KNEE),
        (jt.NITE_JOINT_RIGHT_KNEE, jt.NITE_JOINT_RIGHT_HIP),
    )
    joints = user.skeleton.joints
    for a, b in limbs:
        draw_limb(img, ut, joints[a], joints[b], col)


# -------------------------------------------------------------
# main program from here
# -------------------------------------------------------------

def init_capture_device():
    """Initialize OpenNI2 and NiTE2, then open and return the first device found."""
    openni2.initialize()
    nite2.initialize()
    return openni2.Device.open_any()


def close_capture_device():
    """Tear down the runtimes; NiTE is unloaded before OpenNI (reverse of init)."""
    nite2.unload()
    openni2.unload()


def capture_skeleton():
    """Run the demo loop: show the depth stream and overlay tracked skeletons.

    Press 'q' in the display window to quit.
    """
    args = parse_arg()
    dev = init_capture_device()

    device_name = dev.get_device_info().name.decode('UTF-8')
    print("Device Name: {}".format(device_name))
    use_kinect = (device_name == 'Kinect')
    if use_kinect:
        print('using Kinect.')

    try:
        user_tracker = nite2.UserTracker(dev)
    except utils.NiteError:
        print("Unable to start the NiTE human tracker. Check "
              "the error messages in the console. Model data "
              "(s.dat, h.dat...) might be inaccessible.")
        sys.exit(-1)

    img_w, img_h = CAPTURE_SIZE_KINECT if use_kinect else CAPTURE_SIZE_OTHERS
    win_w = args.window_width
    win_h = int(img_h * win_w / img_w)

    while True:
        ut_frame = user_tracker.read_frame()

        depth_frame = ut_frame.get_depth_frame()
        raw = depth_frame.get_buffer_as_uint16()
        img = np.ndarray((depth_frame.height, depth_frame.width),
                         dtype=np.uint16, buffer=raw).astype(np.float32)
        if use_kinect:
            # Crop the Kinect frame to the declared capture size.
            img = img[0:img_h, 0:img_w]

        # Rescale depth values to [0, 1] for display.
        min_val, max_val, _, _ = cv2.minMaxLoc(img)
        if min_val < max_val:
            img = (img - min_val) / (max_val - min_val)
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)

        for user in (ut_frame.users or []):
            if user.is_new():
                print("new human id:{} detected.".format(user.id))
                user_tracker.start_skeleton_tracking(user.id)
            elif (user.state == nite2.UserState.NITE_USER_STATE_VISIBLE and
                  user.skeleton.state == nite2.SkeletonState.NITE_SKELETON_TRACKED):
                draw_skeleton(img, user_tracker, user, (255, 0, 0))

        cv2.imshow("Depth", cv2.resize(img, (win_w, win_h)))
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    close_capture_device()


# Entry point: run the skeleton-capture demo only when executed as a script.
if __name__ == '__main__':
    capture_skeleton()

Captured screen image for above example.

openni-python's People

Contributors

hjmr avatar jflesch avatar severin-lemaignan avatar skadge avatar

Stargazers

 avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar

Watchers

 avatar  avatar  avatar  avatar  avatar  avatar  avatar  avatar

openni-python's Issues

Nite2.2 not working

Hi, I am using the PyPI version but cannot get NiTE2 (I have NiTE2.2) to work. Using the example in the README.md, it always goes through the except path:

import sys
from openni import openni2, nite2, utils

openni2.initialize()
nite2.initialize()

dev = openni2.Device.open_any()

try:
    userTracker = nite2.UserTracker(dev)
except utils.NiteError as ne:
    logger.error("Unable to start the NiTE human tracker. Check "
                 "the error messages in the console. Model data "
                 "(s.dat, h.dat...) might be inaccessible.")
    sys.exit(-1)

DLLNotLoaded

Hello,

I am trying to run the code, but I am getting an error.

raise DLLNotLoaded("DLL is not loaded")
openni.utils.DLLNotLoaded: DLL is not loaded

I assumed that there is some problem with dependencies, so I installed openni2 library, but the problem still persists.

Could you give some more info about the prerequisites for this library?

Recording from Orbbec Astra

Hi,
I'm trying to record depth and color stream from Orbbec Astra model, that support OpenNI2 (using their package). Everything works, but I have to disconnect/reconnect the camera USB otherwise in the next recording the depth stream seems to be not generated by the camera, hence is not stored in the recording file. I suspect the problem is related to close/stop function I use for streams, recorder and device. I tried different combinations without any luck. Same problem both on Ubuntu 16.04 and Win10. You can have more details and the code used here: https://stackoverflow.com/questions/54342497/record-orbbec-astra-streams-with-python-and-openni2

Any hints?
Thank you

Memory leak

If running the example shown in the README.md it won't take long until the System Monitor shows full RAM memory usage and the OS starts to page to the hard drive until it crashes the X windowing system (or other).

Can't open .ONI videos using string filenames

Problem: .ONI files can't be opened either by using the filename as the path or by using the Device.open_file() method.

Sample Code:

from openni import openni2
OPENNI_FOLDER_PATH = r".\OpenNI-Windows-x64-2.3.0.55\Redist" #To your Redist folder
ONI_VIDEO_PATH = '.\\video.oni'

openni2.initialize(OPENNI_FOLDER_PATH)
device = openni2.Device.open_file(ONI_VIDEO_PATH)

Alternative method that also fails:

from openni import openni2
OPENNI_FOLDER_PATH = r".\OpenNI-Windows-x64-2.3.0.55\Redist"
ONI_VIDEO_PATH = '.\\video.oni'

openni2.initialize(OPENNI_FOLDER_PATH)
device = openni2.Device(ONI_VIDEO_PATH)

Error log:

Traceback (most recent call last):
  File "c:\Users\user\.vscode\extensions\ms-python.python-2019.4.12954\pythonFiles\ptvsd_launcher.py", line 43, in <module>
    main(ptvsdArgs)
  File "c:\Users\user\.vscode\extensions\ms-python.python-2019.4.12954\pythonFiles\lib\python\ptvsd\__main__.py", line 410, in main
    run()
  File "c:\Users\user\.vscode\extensions\ms-python.python-2019.4.12954\pythonFiles\lib\python\ptvsd\__main__.py", line 291, in run_file
    runpy.run_path(target, run_name='__main__')
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\runpy.py", line 263, in run_path
    pkg_name=pkg_name, script_name=fname)
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\runpy.py", line 96, in _run_module_code
    mod_name, mod_spec, pkg_name, script_name)
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\runpy.py", line 85, in _run_code
    exec(code, run_globals)
  File "c:\Users\user\Documents\Python Scripts\AstraSProductFlow\test_filter.py", line 6, in <module>
    device = openni2.Device(ONI_VIDEO_PATH)
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\site-packages\openni\openni2.py", line 223, in __init__
    self._reopen()
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\site-packages\openni\openni2.py", line 233, in _reopen
    c_api.oniDeviceOpen(self._orig_uri, ctypes.byref(self._handle))
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\site-packages\openni\_openni2.py", line 2826, in wrapper
    res = func(*args)
  File "C:\Users\user\AppData\Local\Continuum\anaconda3\envs\AstraS\lib\site-packages\openni\_openni2.py", line 2901, in oniDeviceOpen
    return _oniDeviceOpen(uri, pDevice)
ctypes.ArgumentError: argument 1: <class 'TypeError'>: wrong type

Suggested working correction for the open_file method:
File openni2.py line 262
From
return cls(filename)
to
return cls(filename.encode('utf-8'))

Unexposed get_user_map() function

Hi,

Following the UserView example (UserView/Viewer.cpp) example that comes with Nite2.2, I can see there is a way to get the labelled user map from the UserTrackerFrame using:

nite::UserTrackerFrameRef userTrackerFrame;
nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
depthFrame = userTrackerFrame.getDepthFrame();
const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

Similarly, on python I am trying to replicate that particular bit by doing:

frame = userTracker.read_frame()
depth = np.frombuffer(frame.get_depth_frame().get_buffer_as_uint16(), dtype=np.uint16)
depth = depth.reshape(480, 640)
visual_depth = np.uint8((depth/4500.)*255.)
visual_depth = cv2.cvtColor(visual_depth, cv2.COLOR_GRAY2BGR)
# user_map = frame.get_user_map()

However, there is no get_user_map() function in frame, which is of UserTrackerFrame type. Is this a bug (i.e. non-exposure of the get_user_map function) or is there any other way to proceed?

ModuleNotFoundError when importing nite2

When I tried to import nite2, a ModuleNotFoundError occurred:

>>> from openni import nite2
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "C:\ProgramData\Anaconda3\lib\site-packages\openni-2.2.0.post6-py3.6.egg\openni\nite2.py", line 9, in <module>
  File "C:\ProgramData\Anaconda3\lib\site-packages\openni-2.2.0.post6-py3.6.egg\openni\_nite2.py", line 26, in <module>
ModuleNotFoundError: No module named '_openni2'

My environment is Python 3.6.2 :: Anaconda custom (64-bit) under Windows 10. Both the version installed from here(GitHub) and PyPI have this problem. Any hints?
Thanks a lot for help.

Depth registration matrix?

Hi, I'm working on the depth registration problem. I know that I can access the registered depth image by turning on 'IMAGE_REGISTRATION_DEPTH_TO_COLOR'. However, this is hardware registration. I'm working on doing this registration myself, i.e. getting the raw depth and IR images and registering both of them to the color image (since the hardware registration doesn't provide IR_TO_COLOR registration). I'm wondering if I can read the intrinsic and extrinsic parameters from the device flash, and how can I do that?

Update PyPI

Hi, thanks for the great work.
I was using the version that is on PyPI here: https://pypi.python.org/pypi/openni/2.2.0.post6
but it seems that the changes relative to the common libraries on Unix didn't go into this release. As a consequence I get the libOpenNI2.so: file does not exist in my local folder.

Would you please update the package on PyPI?

Device not found in Jetson Xavier AGX

Hi,

I am not able to visualize the camera images in my Jetson Xavier AGX. I get the error:
openni.utils.OpenNIError: (OniStatus.ONI_STATUS_ERROR, b'DeviceOpen using default: no devices found', None)

Can you help me?

My camera is a LIPSedge L210u.

Recommend Projects

  • React photo React

    A declarative, efficient, and flexible JavaScript library for building user interfaces.

  • Vue.js photo Vue.js

    Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.

  • Typescript photo Typescript

    TypeScript is a superset of JavaScript that compiles to clean JavaScript output.

  • TensorFlow photo TensorFlow

    An Open Source Machine Learning Framework for Everyone

  • Django photo Django

    The Web framework for perfectionists with deadlines.

  • D3 photo D3

    Bring data to life with SVG, Canvas and HTML. 📊📈🎉

Recommend Topics

  • javascript

    JavaScript (JS) is a lightweight interpreted programming language with first-class functions.

  • web

    Some thing interesting about web. New door for the world.

  • server

    A server is a program made to process requests and deliver data to clients.

  • Machine learning

    Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.

  • Game

    Some thing interesting about game, make everyone happy.

Recommend Org

  • Facebook photo Facebook

    We are working to build community through open source technology. NB: members must have two-factor auth.

  • Microsoft photo Microsoft

    Open source projects and samples from Microsoft.

  • Google photo Google

    Google โค๏ธ Open Source for everyone.

  • D3 photo D3

    Data-Driven Documents codes.