Added #ifdef guards for the Android build with the current tech stack

This commit is contained in:
n00b
2024-10-13 21:59:13 -04:00
parent 693037ccd6
commit 9509b4fa65
17 changed files with 2326 additions and 55 deletions

View File

@@ -170,12 +170,11 @@ rc_restoreWindow( )
rc_grabInput( GRABINPUT_FLAG )
rc_setWindowAlwaysOnTop( SETWINDOWALWAYSONTOP_FLAG )
rc_setMouseRelative( SETMOUSERELATIVE_FLAG )
rc_setWindowVSync( SETWINDOWVSYNC_FLAG )
rc_flashWindow( FLASHWINDOW_FLAG )
rc_windowIsGrabbed( )
rc_canvasOpen( OPENCANVAS_W, OPENCANVAS_H, OPENCANVAS_VIEWPORT_X, OPENCANVAS_VIEWPORT_Y, OPENCANVAS_VIEWPORT_W, OPENCANVAS_VIEWPORT_H, OPENCANVAS_MODE )
rc_canvasClose( CLOSECANVAS_C_NUM )
rc_canvasOpen3D( OPENCANVAS3D_W, OPENCANVAS3D_H, OPENCANVAS3D_VIEWPORT_X, OPENCANVAS3D_VIEWPORT_Y, OPENCANVAS3D_VIEWPORT_W, OPENCANVAS3D_VIEWPORT_H, OPENCANVAS3D_MODE )
rc_canvasOpen3D( OPENCANVAS3D_VIEWPORT_X, OPENCANVAS3D_VIEWPORT_Y, OPENCANVAS3D_VIEWPORT_W, OPENCANVAS3D_VIEWPORT_H, OPENCANVAS3D_MODE )
rc_setCanvasVisible( SETCANVASVISIBLE_C_NUM, SETCANVASVISIBLE_FLAG )
rc_canvasIsVisible( CANVASISVISIBLE_C_NUM )
rc_setCanvasViewport( SETCANVASVIEWPORT_CNUM, SETCANVASVIEWPORT_X, SETCANVASVIEWPORT_Y, SETCANVASVIEWPORT_W, SETCANVASVIEWPORT_H )
@@ -195,7 +194,7 @@ rc_getCanvasZ( GETCANVASZ_C_NUM )
rc_canvasClip( CANVASCLIP_X, CANVASCLIP_Y, CANVASCLIP_W, CANVASCLIP_H )
rc_activeCanvas( )
rc_setCanvasPhysics2D( SETCANVASPHYSICS2D_C_NUM, SETCANVASPHYSICS2D_STATE )
rc_canvasOpenSpriteLayer( OPENSPRITECANVAS_W, OPENSPRITECANVAS_H, OPENSPRITECANVAS_VIEWPORT_X, OPENSPRITECANVAS_VIEWPORT_Y, OPENSPRITECANVAS_VIEWPORT_W, OPENSPRITECANVAS_VIEWPORT_H )
rc_canvasOpenSpriteLayer( OPENCANVASSPRITELAYER_W, OPENCANVASSPRITELAYER_H, OPENCANVASSPRITELAYER_VIEWPORT_X, OPENCANVASSPRITELAYER_VIEWPORT_Y, OPENCANVASSPRITELAYER_VIEWPORT_W, OPENCANVASSPRITELAYER_VIEWPORT_H )
rc_drawCircle( CIRCLE_X, CIRCLE_Y, CIRCLE_RADIUS )
rc_drawCircleFill( CIRCLEFILL_X, CIRCLEFILL_Y, CIRCLEFILL_RADIUS )
rc_drawEllipse( ELLIPSE_X, ELLIPSE_Y, ELLIPSE_RX, ELLIPSE_RY )
@@ -455,11 +454,12 @@ rc_deleteMesh( DELETEMESH_MESH )
rc_createMesh( )
rc_addMeshBuffer( ADDMESHBUFFER_MESH, ADDMESHBUFFER_VERTEX_COUNT, &ADDMESHBUFFER_VERTEX_DATA, &ADDMESHBUFFER_NORMAL_DATA, &ADDMESHBUFFER_UV_DATA, ADDMESHBUFFER_INDEX_COUNT, &ADDMESHBUFFER_INDEX_DATA )
rc_loadMeshFromArchive( LOADMESHFROMARCHIVE_ARCHIVE$, LOADMESHFROMARCHIVE_MESH_FILE$ )
rc_createPlaneMesh( CREATEPLANEMESH_W, CREATEPLANEMESH_H, CREATEPLANEMESH_TILECOUNT_W, CREATEPLANEMESH_TILECOUNT_H )
rc_createMeshActor( CREATEMESHACTOR_MESH )
rc_createMeshOctreeActor( CREATEMESHOCTREEACTOR_MESH )
rc_createCubeActor( CREATECUBEACTOR_CUBE_SIZE )
rc_createSphereActor( CREATESPHEREACTOR_RADIUS )
rc_createWaterPlaneActor( CREATEWATERPLANEACTOR_W, CREATEWATERPLANEACTOR_H )
rc_createWaterActor( CREATEWATERACTOR_MESH, CREATEWATERACTOR_WAVEHEIGHT, CREATEWATERACTOR_WAVESPEED, CREATEWATERACTOR_WAVELENGTH )
rc_createLightActor( )
rc_createBillboardActor( )
rc_createTerrainActor( CREATETERRAINACTOR_HMAP_FILE$ )
@@ -743,16 +743,6 @@ rc_scaleTerrainTexture( SCALETERRAINTEXTURE_ACTOR, SCALETERRAINTEXTURE_SCALE,
rc_setTerrainCameraMovementDelta( SETTERRAINCAMERAMOVEMENTDELTA_ACTOR, SETTERRAINCAMERAMOVEMENTDELTA_DELTA )
rc_setTerrainCameraRotationDelta( SETTERRAINCAMERAROTATIONDELTA_ACTOR, SETTERRAINCAMERAROTATIONDELTA_DELTA )
rc_setTerrainPatchLOD( SETTERRAINPATCHLOD_ACTOR, SETTERRAINPATCHLOD_PATCHX, SETTERRAINPATCHLOD_PATCHZ, SETTERRAINPATCHLOD_LOD )
rc_setWaterWindForce( SETWATERWINDFORCE_ACTOR, SETWATERWINDFORCE_F )
rc_getWaterWindForce( GETWATERWINDFORCE_ACTOR )
rc_setWaterWaveHeight( SETWATERWAVEHEIGHT_ACTOR, SETWATERWAVEHEIGHT_H )
rc_getWaterWaveHeight( GETWATERWAVEHEIGHT_ACTOR )
rc_setWaterWindDirection( SETWATERWINDDIRECTION_ACTOR, SETWATERWINDDIRECTION_X, SETWATERWINDDIRECTION_Z )
rc_getWaterWindDirection( GETWATERWINDDIRECTION_ACTOR, &GETWATERWINDDIRECTION_X, &GETWATERWINDDIRECTION_Z )
rc_setWaterColor( SETWATERCOLOR_ACTOR, SETWATERCOLOR_C )
rc_getWaterColor( GETWATERCOLOR_ACTOR )
rc_setWaterColorBlendFactor( SETWATERCOLORBLENDFACTOR_ACTOR, SETWATERCOLORBLENDFACTOR_CBFACTOR )
rc_getWaterColorBlendFactor( GETWATERCOLORBLENDFACTOR_ACTOR )
rc_setActorAnimation( SETACTORANIMATION_ACTOR, SETACTORANIMATION_START_FRAME, SETACTORANIMATION_END_FRAME )
rc_setActorAnimationSpeed( SETACTORANIMATIONSPEED_ACTOR, SETACTORANIMATIONSPEED_SPEED )
rc_setActorFrame( SETACTORFRAME_ACTOR, SETACTORFRAME_FRAME )

Binary file not shown.

151
rcbasic_runtime/Android.mk Executable file
View File

@@ -0,0 +1,151 @@
# ndk-build module definitions for the RCBasic runtime.
# (The duplicated LOCAL_PATH assignment was removed; the second one was a no-op.)
LOCAL_PATH := $(call my-dir)

# --- Prebuilt Bullet physics library (static), built separately under bullet3. ---
include $(CLEAR_VARS)
LOCAL_MODULE := Bullet-prebuilt
LOCAL_SRC_FILES := ../bullet3/build3/Android/obj/local/$(TARGET_ARCH_ABI)/libBullet.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/..
include $(PREBUILT_STATIC_LIBRARY)

# --- Prebuilt FreeType (shared). ---
include $(CLEAR_VARS)
LOCAL_MODULE := freetype-prebuilt
LOCAL_SRC_FILES := ../freetype/Android/libs/$(TARGET_ARCH_ABI)/libfreetype.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/..
include $(PREBUILT_SHARED_LIBRARY)
# --- Main runtime module: everything below builds libmain.so. ---
include $(CLEAR_VARS)
LOCAL_MODULE := main
# Relative paths (from this directory) to the bundled third-party trees.
SDL_PATH := ../SDL
SDL_IMAGE_PATH := ../SDL2_image
SDL_MIXER_PATH := ../SDL2_mixer
SDL_NET_PATH := ../SDL2_net
THEORA_PATH := ../theora
IRR_PATH := ../RCIrrlicht
BULLET_PATH := ../bullet3
IRRBULLET_PATH := ../irrBullet
IRRTHEORA_PATH := ../irrTheora
FREETYPE_PATH := ../freetype
BOX2D_PATH := ../box2d-2.4.2
AN8_PATH := ../an8-parser
# Default (non-armeabi-v7a ABIs): full floating-point libvorbis + stock theoraplay.
VORBIS_LIBRARY_PATH := ../libvorbis64
THEORAPLAY_PATH := ../src/theoraplay
# 32-bit ARM swaps in Tremor (integer-only Vorbis) and the matching theoraplay variant.
ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
VORBIS_LIBRARY_PATH := ../libvorbisidec-1.2.1
THEORAPLAY_PATH := ../src/theoraplay_tremor
endif
# RC_ANDROID_BUILD selects the Android include paths/ifdefs in the runtime sources.
LOCAL_CFLAGS := -I$(LOCAL_PATH)/$(THEORAPLAY_PATH) -I$(LOCAL_PATH)/$(IRR_PATH)/include -DRC_ANDROID_BUILD
LOCAL_CPP_FEATURES += exceptions
LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include \
$(LOCAL_PATH)/$(SDL_IMAGE_PATH) \
$(LOCAL_PATH)/$(SDL_MIXER_PATH) \
$(LOCAL_PATH)/$(SDL_NET_PATH) \
$(LOCAL_PATH)/$(THEORA_PATH) \
$(LOCAL_PATH)/$(VORBIS_LIBRARY_PATH)/include \
$(LOCAL_PATH)/$(THEORAPLAY_PATH) \
$(LOCAL_PATH)/$(IRR_PATH)/include \
$(LOCAL_PATH)/$(BULLET_PATH)/src \
$(LOCAL_PATH)/$(IRRBULLET_PATH)/include \
$(LOCAL_PATH)/$(IRRTHEORA_PATH) \
$(LOCAL_PATH)/$(FREETYPE_PATH)/include \
$(LOCAL_PATH)/$(AN8_PATH) \
$(LOCAL_PATH)/$(BOX2D_PATH)/include
# Add your application source files here...
LOCAL_SRC_FILES := main.cpp $(LOCAL_PATH)/$(THEORAPLAY_PATH)/theoraplay.c gui_freetype_font.cpp
# Box2D and irrBullet are compiled directly into this module (no prebuilts).
# NOTE: this must remain one backslash-continued list; make would treat an
# interior comment line as part of the variable's value.
LOCAL_SRC_FILES += \
$(BOX2D_PATH)/src/collision/b2_broad_phase.cpp \
$(BOX2D_PATH)/src/collision/b2_chain_shape.cpp \
$(BOX2D_PATH)/src/collision/b2_circle_shape.cpp \
$(BOX2D_PATH)/src/collision/b2_collide_circle.cpp \
$(BOX2D_PATH)/src/collision/b2_collide_edge.cpp \
$(BOX2D_PATH)/src/collision/b2_collide_polygon.cpp \
$(BOX2D_PATH)/src/collision/b2_collision.cpp \
$(BOX2D_PATH)/src/collision/b2_distance.cpp \
$(BOX2D_PATH)/src/collision/b2_dynamic_tree.cpp \
$(BOX2D_PATH)/src/collision/b2_edge_shape.cpp \
$(BOX2D_PATH)/src/collision/b2_polygon_shape.cpp \
$(BOX2D_PATH)/src/collision/b2_time_of_impact.cpp \
$(BOX2D_PATH)/src/common/b2_block_allocator.cpp \
$(BOX2D_PATH)/src/common/b2_draw.cpp \
$(BOX2D_PATH)/src/common/b2_math.cpp \
$(BOX2D_PATH)/src/common/b2_settings.cpp \
$(BOX2D_PATH)/src/common/b2_stack_allocator.cpp \
$(BOX2D_PATH)/src/common/b2_timer.cpp \
$(BOX2D_PATH)/src/dynamics/b2_body.cpp \
$(BOX2D_PATH)/src/dynamics/b2_chain_circle_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_chain_polygon_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_circle_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_contact_manager.cpp \
$(BOX2D_PATH)/src/dynamics/b2_contact_solver.cpp \
$(BOX2D_PATH)/src/dynamics/b2_distance_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_edge_circle_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_edge_polygon_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_fixture.cpp \
$(BOX2D_PATH)/src/dynamics/b2_friction_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_gear_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_island.cpp \
$(BOX2D_PATH)/src/dynamics/b2_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_motor_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_mouse_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_polygon_circle_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_polygon_contact.cpp \
$(BOX2D_PATH)/src/dynamics/b2_prismatic_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_pulley_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_revolute_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_weld_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_wheel_joint.cpp \
$(BOX2D_PATH)/src/dynamics/b2_world_callbacks.cpp \
$(BOX2D_PATH)/src/dynamics/b2_world.cpp \
$(BOX2D_PATH)/src/rope/b2_rope.cpp \
$(IRRBULLET_PATH)/src/irrBulletBoxShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletBvhTriangleMeshShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletCapsuleShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionCallBackInformation.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionObjectAffectorAttract.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionObjectAffector.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionObjectAffectorDelete.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionObject.cpp \
$(IRRBULLET_PATH)/src/irrBulletCollisionShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletcommon.cpp \
$(IRRBULLET_PATH)/src/irrBulletConeShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletConvexHullShape.cpp \
$(IRRBULLET_PATH)/src/irrBullet.cpp \
$(IRRBULLET_PATH)/src/irrBulletCylinderShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletGhostObject.cpp \
$(IRRBULLET_PATH)/src/irrBulletGImpactMeshShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletLiquidBody.cpp \
$(IRRBULLET_PATH)/src/irrBulletMotionState.cpp \
$(IRRBULLET_PATH)/src/irrBulletPhysicsDebug.cpp \
$(IRRBULLET_PATH)/src/irrBulletRayCastVehicle.cpp \
$(IRRBULLET_PATH)/src/irrBulletRigidBody.cpp \
$(IRRBULLET_PATH)/src/irrBulletSoftBody.cpp \
$(IRRBULLET_PATH)/src/irrBulletSphereShape.cpp \
$(IRRBULLET_PATH)/src/irrBulletWorld.cpp
# Shared libs built by the sibling ndk-build modules, plus the imported
# prebuilt FreeType module declared earlier in this makefile.
LOCAL_SHARED_LIBRARIES := SDL2 SDL2_image SDL2_mixer SDL2_net Irrlicht freetype-prebuilt
LOCAL_STATIC_LIBRARIES := Bullet-prebuilt android_native_app_glue
# Theora/Vorbis stack; on armeabi-v7a, Tremor replaces vorbis64.
RC_TH_LOCAL_SHARED_LIBRARIES := ogg_shared vorbis64 theora
ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
RC_TH_LOCAL_SHARED_LIBRARIES := ogg_shared tremor theora
# -DRC_USE_TREMOR switches the decoder code paths to Tremor's integer API.
LOCAL_CFLAGS += -DRC_USE_TREMOR -O2
endif
LOCAL_SHARED_LIBRARIES += $(RC_TH_LOCAL_SHARED_LIBRARIES)
# System libraries: GLES 1/2, OpenSL ES audio, logcat logging, android NDK glue.
LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -lOpenSLES -llog -landroid
include $(BUILD_SHARED_LIBRARY)

13
rcbasic_runtime/CMakeLists.txt Executable file
View File

@@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.6)

project(MY_APP)

# Locate the SDL2 library; the result path is stored in the SDL2 variable.
find_library(SDL2 SDL2)

# add_library() with no sources is only legal in CMake >= 3.11, but the
# declared minimum is 3.6 — list the source directly so both work.
add_library(main SHARED YourSourceHere.c)

# Link the find_library() result (the variable), not the bare name, so the
# library actually found above is the one linked.
target_link_libraries(main ${SDL2})

View File

@@ -1,3 +1,5 @@
#include "rc_os_defines.h"
#ifndef _GUI_FREETYPE_FONT_H
#define _GUI_FREETYPE_FONT_H
@@ -6,9 +8,14 @@
#if COMPILE_WITH_FREETYPE
#include <freetype2/ft2build.h>
#include <freetype/freetype.h>
#ifdef RC_ANDROID
#include "ft2build.h"
#include "freetype/freetype.h"
#else
#include <freetype2/ft2build.h>
#include <freetype/freetype.h>
#endif // RC_ANDROID
#include <irrlicht.h>
class CGUITTFace : public irr::IReferenceCounted

View File

@@ -27,6 +27,7 @@
#ifdef RC_ANDROID
#include <android_native_app_glue.h>
#include <jni.h>
#include <sys/param.h>
#include <algorithm>
@@ -4353,6 +4354,11 @@ void rcbasic_test()
SDL_DestroyWindow(win);
}
#ifdef RC_ANDROID
void android_main( android_app* application )
{
}
#else
int main(int argc, char * argv[])
{
//rcbasic_test();
@@ -4481,3 +4487,4 @@ int main(int argc, char * argv[])
//cout << "Hello world!" << endl;
return 0;
}
#endif

View File

@@ -52,7 +52,7 @@ int GetLinePlaneIntersection(double* line_point, double* line_direction, double*
//'# check if the intersection point is on the plane
double plane_distance = abs((intersection[0] - plane_point_1[0]) * plane_normal[0] + (intersection[1] - plane_point_1[1]) * plane_normal[1] + (intersection[2] - plane_point_1[2]) * plane_normal[2]);
if(plane_distance < 10^-6)
if(plane_distance < 1e-6) /* note: 10^-6 is bitwise XOR in C, not 1e-6 */
return true;
else
return false;

View File

@@ -1,8 +1,19 @@
#ifndef RC_GFX_INCLUDED
#define RC_GFX_INCLUDED
#include <SDL2/SDL.h>
#include <irrlicht.h>
#ifdef RC_ANDROID
#include "SDL.h"
#else
#include <SDL2/SDL.h>
#endif // RC_ANDROID
#ifdef RC_ANDROID
#include <irrlicht.h>
#include <btBulletDynamicsCommon.h>
#else
#include <irrlicht.h>
#include <bullet/btBulletDynamicsCommon.h>
#endif // RC_ANDROID
#include <iostream>
#include <sstream>
#include <string>
@@ -15,7 +26,6 @@
#include "rc_utf8.h"
#include <box2d/box2d.h>
#include "rc_sprite2D.h"
#include <bullet/btBulletDynamicsCommon.h>
#include <irrtheora.h>
using namespace irr;
@@ -286,7 +296,7 @@ bool rc_windowOpenEx(std::string title, int x, int y, int w, int h, uint32_t win
rc_font.clear();
rc_canvas_obj back_buffer;
back_buffer.texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d((irr::u32)w, (irr::u32)h), "rt", ECF_A8R8G8B8);
back_buffer.texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d<irr::u32>((irr::u32)w, (irr::u32)h), "rt", ECF_A8R8G8B8);
back_buffer.dimension.Width = w;
back_buffer.dimension.Height = h;
back_buffer.viewport.position.set(0,0);
@@ -2197,7 +2207,7 @@ int rc_createImageEx(int w, int h, double * pdata, Uint32 colorkey, bool use_col
if(w <= 0 || h <=0)
return -1;
irr::video::IImage* image = VideoDriver->createImage(irr::video::ECF_A8R8G8B8, irr::core::dimension2d((irr::u32)w,(irr::u32)h));
irr::video::IImage* image = VideoDriver->createImage(irr::video::ECF_A8R8G8B8, irr::core::dimension2d<irr::u32>((irr::u32)w,(irr::u32)h));
if(!image)
return -1;
@@ -2513,7 +2523,7 @@ void rc_drawImage(int img_id, int x, int y)
if(rc_image[img_id].image)
{
irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(0, 0), src_size);
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(0, 0), src_size);
irr::core::position2d<irr::s32> position(x, y);
@@ -2552,7 +2562,7 @@ int rc_copyImage(int src_id)
irr::core::dimension2d<irr::u32> src_size = rc_image[src_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(0, 0), src_size);
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(0, 0), src_size);
irr::core::position2d<irr::s32> position(0, 0);
irr::core::position2d<irr::s32> rotationPoint(0, 0); //since we are not rotating it doesn't matter
irr::f32 rotation = 0;
@@ -2660,7 +2670,7 @@ void rc_drawImage_ZoomEx(int img_id, int x, int y, int src_x, int src_y, int src
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::position2d<irr::s32> position(x, y);
@@ -2716,7 +2726,7 @@ void rc_drawImage_RotozoomEx(int img_id, int x, int y, int src_x, int src_y, int
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::position2d<irr::s32> position(x, y);
@@ -2774,7 +2784,7 @@ void rc_drawImage_FlipEx(int img_id, int x, int y, int src_x, int src_y, int src
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::position2d<irr::s32> rotationPoint(x + (src_w/2), y + (src_h/2));
@@ -2804,7 +2814,7 @@ void rc_drawImage_Blit(int img_id, int x, int y, int src_x, int src_y, int src_w
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::position2d<irr::s32> position(x, y);
@@ -2818,7 +2828,7 @@ void rc_drawImage_Blit(int img_id, int x, int y, int src_x, int src_y, int src_w
rc_image[img_id].color_mod.getGreen(),
rc_image[img_id].color_mod.getBlue());
irr::core::rect<irr::s32> dest( irr::core::vector2d(x, y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> dest( irr::core::vector2d<irr::s32>(x, y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::vector2df screenSize(rc_canvas[rc_active_canvas].dimension.Width, rc_canvas[rc_active_canvas].dimension.Height);
@@ -2835,7 +2845,7 @@ void rc_drawImage_RotateEx(int img_id, int x, int y, int src_x, int src_y, int s
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
//irr::core::position2d<irr::s32> position(x, y);
@@ -2851,7 +2861,7 @@ void rc_drawImage_RotateEx(int img_id, int x, int y, int src_x, int src_y, int s
irr::core::vector2df screenSize(rc_canvas[rc_active_canvas].dimension.Width, rc_canvas[rc_active_canvas].dimension.Height);
irr::core::rect<irr::s32> dest( irr::core::vector2d(x, y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> dest( irr::core::vector2d<irr::s32>(x, y), irr::core::dimension2d<irr::s32>(src_w, src_h));
draw2DImage2(VideoDriver, rc_image[img_id].image, sourceRect, dest, rotationPoint, rotation, useAlphaChannel, color, screenSize);
}
@@ -2865,7 +2875,7 @@ void rc_drawImage_BlitEx(int img_id, int x, int y, int w, int h, int src_x, int
if(rc_image[img_id].image)
{
//irr::core::dimension2d<irr::u32> src_size = rc_image[img_id].image->getSize();
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
//irr::core::position2d<irr::s32> position(x, y);
@@ -2879,7 +2889,7 @@ void rc_drawImage_BlitEx(int img_id, int x, int y, int w, int h, int src_x, int
rc_image[img_id].color_mod.getGreen(),
rc_image[img_id].color_mod.getBlue());
irr::core::rect<irr::s32> dest( irr::core::vector2d(x, y), irr::core::dimension2d(w, h));
irr::core::rect<irr::s32> dest( irr::core::vector2d<irr::s32>(x, y), irr::core::dimension2d<irr::s32>(w, h));
irr::core::vector2df screenSize(rc_canvas[rc_active_canvas].dimension.Width, rc_canvas[rc_active_canvas].dimension.Height);
@@ -3028,7 +3038,7 @@ void drawCanvasImage(irr::video::ITexture* texture, int x, int y, int src_x, int
{
if(texture)
{
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d(src_x, src_y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> sourceRect( irr::core::vector2d<irr::s32>(src_x, src_y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::position2d<irr::s32> position(x, y);
@@ -3039,7 +3049,7 @@ void drawCanvasImage(irr::video::ITexture* texture, int x, int y, int src_x, int
bool useAlphaChannel = true;
irr::video::SColor color(255,255,255,255);
irr::core::rect<irr::s32> dest( irr::core::vector2d(x, y), irr::core::dimension2d(src_w, src_h));
irr::core::rect<irr::s32> dest( irr::core::vector2d<irr::s32>(x, y), irr::core::dimension2d<irr::s32>(src_w, src_h));
irr::core::vector2df screenSize(tgt_width, tgt_height);
@@ -3061,7 +3071,7 @@ int rc_windowClip(int x, int y, int w, int h)
else
return -1;
irr::video::ITexture* texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d((irr::u32)w, (irr::u32)h), "win_clip_image", irr::video::ECF_A8R8G8B8);
irr::video::ITexture* texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d<irr::u32>((irr::u32)w, (irr::u32)h), "win_clip_image", irr::video::ECF_A8R8G8B8);
if(!texture)
return -1;
@@ -3118,7 +3128,7 @@ int rc_canvasClip(int x, int y, int w, int h)
else
return -1;
irr::video::ITexture* texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d((irr::u32)w, (irr::u32)h), "canvas_clip_image", irr::video::ECF_A8R8G8B8);
irr::video::ITexture* texture = VideoDriver->addRenderTargetTexture(irr::core::dimension2d<irr::u32>((irr::u32)w, (irr::u32)h), "canvas_clip_image", irr::video::ECF_A8R8G8B8);
if(!texture)
return -1;
@@ -3310,7 +3320,7 @@ void drawSprites(int canvas_id)
continue;
src_size = rc_image[img_id].image->getSize();
sourceRect = irr::core::rect<irr::s32>( irr::core::vector2d(0, 0), src_size);
sourceRect = irr::core::rect<irr::s32>( irr::core::vector2d<irr::s32>(0, 0), src_size);
physics_pos = sprite->physics.body->GetPosition();
x = (int)physics_pos.x;
@@ -3694,11 +3704,11 @@ bool rc_update()
VideoDriver->setRenderTarget(rc_canvas[0].texture);
irr::core::vector2d<s32> bb_position(0,0);
irr::core::dimension2d<u32> bb_dimension(win_w, win_h);
VideoDriver->setViewPort( irr::core::rect(bb_position, bb_dimension) );
VideoDriver->setViewPort( irr::core::rect<irr::s32>(bb_position, bb_dimension) );
irr::core::vector2d screenSize( (irr::f32) rc_canvas[0].dimension.Width, (irr::f32) rc_canvas[0].dimension.Height );
irr::core::vector2d<irr::f32> screenSize( (irr::f32) rc_canvas[0].dimension.Width, (irr::f32) rc_canvas[0].dimension.Height );
double frame_current_time = ((double)SDL_GetTicks())/1000.0d;
double frame_current_time = ((double)SDL_GetTicks())/1000.0;
for(int i = 0; i < rc_transition_actor.size();)
{
@@ -3774,7 +3784,7 @@ bool rc_update()
if(rc_canvas[canvas_id].type == RC_CANVAS_TYPE_SPRITE)
drawSprites(canvas_id);
draw2DImage2(VideoDriver, rc_canvas[canvas_id].texture, src, dest, irr::core::vector2d<irr::s32>(0, 0), 0, true, color, screenSize);
draw2DImage2(VideoDriver, rc_canvas[canvas_id].texture, src, dest, irr::core::position2d<irr::s32>(0, 0), 0, true, color, screenSize);
//drawSprites(canvas_id);
//draw2DImage2(VideoDriver, rc_canvas[canvas_id].sprite_layer, src, dest, irr::core::vector2d<irr::s32>(0, 0), 0, true, color, screenSize);
@@ -3789,7 +3799,7 @@ bool rc_update()
VideoDriver->setRenderTarget(0);
//VideoDriver->beginScene(true, true);
VideoDriver->draw2DImage(rc_canvas[0].texture, irr::core::vector2d(0,0));
VideoDriver->draw2DImage(rc_canvas[0].texture, irr::core::vector2d<irr::s32>(0,0));
//device->getGUIEnvironment()->drawAll();
VideoDriver->endScene();

View File

@@ -1,7 +1,15 @@
#ifndef RC_GFX3D_H_INCLUDED
#define RC_GFX3D_H_INCLUDED
#include <SDL2/SDL.h>
#ifdef RC_ANDROID
#include "SDL.h"
#include <btBulletDynamicsCommon.h>
#include <BulletCollision/CollisionDispatch/btGhostObject.h>
#else
#include <SDL2/SDL.h>
#include <bullet/btBulletDynamicsCommon.h>
#include <bullet/BulletCollision/CollisionDispatch/btGhostObject.h>
#endif // RC_ANDROID
#include <irrlicht.h>
#include <iostream>
#include <sstream>
@@ -16,9 +24,6 @@
#include "rc_matrix.h"
#include "RealisticWater.h"
#include <bullet/btBulletDynamicsCommon.h>
#include <bullet/BulletCollision/CollisionDispatch/btGhostObject.h>
//load a mesh from a file
int rc_loadMesh(std::string mesh_file)
{
@@ -93,6 +98,103 @@ int rc_loadMeshFromArchive(std::string archive, std::string mesh_file)
return mesh_id;
}
// Load an .an8 (Anim8or) project file and cache it in rc_an8.
// Returns the slot index (handle) on success, -1 if the parse failed.
int rc_loadAN8(std::string an8_file)
{
int id = -1;
// Reuse a free (inactive) slot if one exists.
for(int i = 0; i < rc_an8.size(); i++)
{
if(!rc_an8[i].active)
{
id = i;
break;
}
}
// Otherwise append a fresh slot.
if(id < 0)
{
id = rc_an8.size();
rc_an8_obj obj;
rc_an8.push_back(obj);
}
rc_an8[id].project = an8::loadAN8(an8_file);
if(rc_an8[id].project.exists)
{
rc_an8[id].active = true;
return id;
}
// Parse failed: leave the slot marked inactive so it can be reused.
rc_an8[id].active = false;
return -1;
}
//load a mesh from a previously loaded AN8 project, by scene name
//(returns a handle into rc_mesh, or -1 on any failure)
int rc_loadMeshFromAN8(int an8_id, std::string scene_name)
{
int mesh_id = -1;
// Validate the AN8 project handle.
if(an8_id < 0 || an8_id >= rc_an8.size())
return -1;
if(!rc_an8[an8_id].active)
return -1;
rc_mesh_obj mesh_obj;
mesh_obj.mesh_type = RC_MESH_TYPE_ANIMATED;
// Build an animated mesh from the named scene of the cached project.
mesh_obj.mesh = an8::loadAN8Scene(device, rc_an8[an8_id].project, scene_name);
if(!mesh_obj.mesh)
return -1;
// Reuse a free mesh slot (null mesh pointer) if one is available.
for(int i = 0; i < rc_mesh.size(); i++)
{
if(!rc_mesh[i].mesh)
{
mesh_id = i;
break;
}
}
if(mesh_id < 0)
{
mesh_id = rc_mesh.size();
rc_mesh.push_back(mesh_obj);
}
else
{
rc_mesh[mesh_id] = mesh_obj;
}
return mesh_id;
}
// Number of scenes in a loaded AN8 project; 0 for an invalid or inactive handle.
int rc_getNumAN8Scenes(int an8_id)
{
    const bool handle_ok = (an8_id >= 0) && (an8_id < rc_an8.size()) && rc_an8[an8_id].active;
    if(!handle_ok)
        return 0;
    return rc_an8[an8_id].project.scenes.size();
}
// Name of scene scene_num in a loaded AN8 project; empty string for any
// invalid/inactive handle or out-of-range scene index.
std::string rc_getAN8SceneName(int an8_id, int scene_num)
{
    const bool handle_ok = (an8_id >= 0) && (an8_id < rc_an8.size()) && rc_an8[an8_id].active;
    if(!handle_ok)
        return "";
    const bool scene_ok = (scene_num >= 0) && (scene_num < rc_an8[an8_id].project.scenes.size());
    if(!scene_ok)
        return "";
    return rc_an8[an8_id].project.scenes[scene_num].name;
}
//delete mesh
void rc_deleteMesh(int mesh_id)
{
@@ -5402,7 +5504,7 @@ void rc_startActorTransition(int actor, double frame, double transition_time)
node->setCurrentFrame(frame);
rc_actor[actor].transition = true;
rc_actor[actor].transition_time = transition_time;
rc_actor[actor].transition_start_time = ((double)SDL_GetTicks())/1000.0d;
rc_actor[actor].transition_start_time = ((double)SDL_GetTicks())/1000.0;
rc_transition_actor.push_back(actor);
}
}
@@ -5473,7 +5575,7 @@ void rc_getTerrainPatchAABB(int actor, double patch_x, double patch_z, double* m
{
case RC_NODE_TYPE_TERRAIN:
irr::scene::ITerrainSceneNode* node = (irr::scene::ITerrainSceneNode*)rc_actor[actor].mesh_node;
irr::core::aabbox3d bbox = node->getBoundingBox(patch_x, patch_z);
irr::core::aabbox3d<irr::f32> bbox = node->getBoundingBox(patch_x, patch_z);
*min_x = bbox.MinEdge.X;
*min_y = bbox.MinEdge.Y;

View File

@@ -1,7 +1,15 @@
#ifndef RC_GFX_CORE_H_INCLUDED
#define RC_GFX_CORE_H_INCLUDED
#include <SDL2/SDL.h>
#ifdef RC_ANDROID
#include "SDL.h"
#include "btBulletDynamicsCommon.h"
#include "BulletSoftBody/btSoftRigidDynamicsWorld.h"
#else
#include <SDL2/SDL.h>
#include <bullet/btBulletDynamicsCommon.h>
#include <BulletSoftBody/btSoftRigidDynamicsWorld.h>
#endif // RC_ANDROID
#include <irrlicht.h>
#include <iostream>
#include <sstream>
@@ -15,9 +23,12 @@
#include "camera.h"
#include <box2d/box2d.h>
#include "rc_sprite2D.h"
#include <bullet/btBulletDynamicsCommon.h>
#include <BulletSoftBody/btSoftRigidDynamicsWorld.h>
#include <irrBullet.h>
#ifdef RC_ANDROID
#include "an8parser.h"
#else
#include <an8parser.h>
#endif
using namespace irr;
@@ -396,6 +407,14 @@ struct rc_mesh_obj
};
irr::core::array<rc_mesh_obj> rc_mesh;
struct rc_an8_obj
{
bool active;
an8::an8_project project;
};
irr::core::array<rc_an8_obj> rc_an8;
#define RC_NODE_TYPE_NONE 0
#define RC_NODE_TYPE_MESH 1
#define RC_NODE_TYPE_OTMESH 2

View File

@@ -10,12 +10,17 @@
// libtheora-1.1.1/examples/player_example.c, but this is all my own
// code.
#include "rc_os_defines.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include "SDL2/SDL.h"
#ifdef RC_ANDROID_BUILD
#include "SDL.h"
#else
#include "SDL2/SDL.h"
#endif
#ifdef _WIN32
#include <windows.h>
#define THEORAPLAY_THREAD_T HANDLE

View File

@@ -0,0 +1,825 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
// I wrote this with a lot of peeking at the Theora example code in
// libtheora-1.1.1/examples/player_example.c, but this is all my own
// code.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#ifdef RC_ANDROID_BUILD
#include "SDL.h"
#else
#include "SDL2/SDL.h"
#endif
#ifdef _WIN32
#include <windows.h>
#define THEORAPLAY_THREAD_T HANDLE
#define THEORAPLAY_MUTEX_T HANDLE
#define sleepms(x) Sleep(x)
#else
#include <pthread.h>
#include <unistd.h>
#define sleepms(x) usleep((x) * 1000)
#define THEORAPLAY_THREAD_T pthread_t
#define THEORAPLAY_MUTEX_T pthread_mutex_t
#endif
#include "theoraplay.h"
#include "theora/theoradec.h"
#include "vorbis/codec.h"
#define THEORAPLAY_INTERNAL 1
typedef THEORAPLAY_VideoFrame VideoFrame;
typedef THEORAPLAY_AudioPacket AudioPacket;
// !!! FIXME: these all count on the pixel format being TH_PF_420 for now.
typedef unsigned char *(*ConvertVideoFrameFn)(const th_info *tinfo,
const th_ycbcr_buffer ycbcr);
// Copy a decoded 4:2:0 frame into one contiguous planar buffer; the plane
// order is chosen by p0/p1/p2 (indices into the ycbcr plane array), which
// lets the YV12 and IYUV wrappers share this code.
// Returns a malloc'd buffer the caller must free(), or NULL on OOM.
static unsigned char *ConvertVideoFrame420ToYUVPlanar(
const th_info *tinfo, const th_ycbcr_buffer ycbcr,
const int p0, const int p1, const int p2)
{
int i;
const int w = tinfo->pic_width;
const int h = tinfo->pic_height;
// Byte offsets of the visible picture region inside the padded planes;
// pic_x/pic_y are rounded down to even so chroma stays aligned with luma.
const int yoff = (tinfo->pic_x & ~1) + ycbcr[0].stride * (tinfo->pic_y & ~1);
const int uvoff = (tinfo->pic_x / 2) + (ycbcr[1].stride) * (tinfo->pic_y / 2);
// NOTE: w*h*2 over-allocates a little; packed 4:2:0 needs w*h*3/2 bytes.
unsigned char *yuv = (unsigned char *) malloc(w * h * 2);
if (yuv)
{
unsigned char *dst = yuv;
// Full-resolution plane p0 first, then the two half-resolution chroma planes.
for (i = 0; i < h; i++, dst += w)
memcpy(dst, ycbcr[p0].data + yoff + ycbcr[p0].stride * i, w);
for (i = 0; i < (h / 2); i++, dst += w/2)
memcpy(dst, ycbcr[p1].data + uvoff + ycbcr[p1].stride * i, w / 2);
for (i = 0; i < (h / 2); i++, dst += w/2)
memcpy(dst, ycbcr[p2].data + uvoff + ycbcr[p2].stride * i, w / 2);
} // if
return yuv;
} // ConvertVideoFrame420ToYUVPlanar
// YV12 plane order: Y, then Cr (plane 2), then Cb (plane 1).
static unsigned char *ConvertVideoFrame420ToYV12(const th_info *tinfo,
const th_ycbcr_buffer ycbcr)
{
return ConvertVideoFrame420ToYUVPlanar(tinfo, ycbcr, 0, 2, 1);
} // ConvertVideoFrame420ToYV12
// IYUV (I420) plane order: Y, then Cb (plane 1), then Cr (plane 2).
static unsigned char *ConvertVideoFrame420ToIYUV(const th_info *tinfo,
const th_ycbcr_buffer ycbcr)
{
return ConvertVideoFrame420ToYUVPlanar(tinfo, ycbcr, 0, 1, 2);
} // ConvertVideoFrame420ToIYUV
// RGB
#define THEORAPLAY_CVT_FNNAME_420 ConvertVideoFrame420ToRGB
#define THEORAPLAY_CVT_RGB_ALPHA 0
#include "theoraplay_cvtrgb.h"
#undef THEORAPLAY_CVT_RGB_ALPHA
#undef THEORAPLAY_CVT_FNNAME_420
// RGBA
#define THEORAPLAY_CVT_FNNAME_420 ConvertVideoFrame420ToRGBA
#define THEORAPLAY_CVT_RGB_ALPHA 1
#include "theoraplay_cvtrgb.h"
#undef THEORAPLAY_CVT_RGB_ALPHA
#undef THEORAPLAY_CVT_FNNAME_420
// All state for one decode session. Fields are shared between the API side
// and the worker thread (the worker loops test 'halt'); the volatile fields
// are presumably guarded by 'lock' — confirm against the full decoder source.
typedef struct TheoraDecoder
{
// Thread wrangling...
int thread_created;
THEORAPLAY_MUTEX_T lock;
volatile int halt; // worker loops poll this to know when to stop.
int thread_done;
THEORAPLAY_THREAD_T worker;
// API state...
THEORAPLAY_Io *io; // caller-supplied stream read callback(s).
unsigned int maxframes; // Max video frames to buffer.
volatile unsigned int prepped;
volatile unsigned int videocount; // currently buffered frames.
volatile unsigned int audioms; // currently buffered audio samples.
volatile int hasvideo;
volatile int hasaudio;
volatile int decode_error;
THEORAPLAY_VideoFormat vidfmt;
ConvertVideoFrameFn vidcvt; // pixel-format converter matching vidfmt.
// Singly linked output queues; tail pointers allow O(1) append.
VideoFrame *videolist;
VideoFrame *videolisttail;
AudioPacket *audiolist;
AudioPacket *audiolisttail;
} TheoraDecoder;
// Tiny thread/mutex portability layer: Win32 implementations first,
// pthread implementations in the #else branch below.
#ifdef _WIN32
// Spawn the worker thread; returns 0 on success (nonzero on failure),
// matching pthread_create's return convention.
static inline int Thread_Create(TheoraDecoder *ctx, void *(*routine) (void*))
{
ctx->worker = CreateThread(
NULL,
0,
(LPTHREAD_START_ROUTINE) routine,
(LPVOID) ctx,
0,
NULL
);
return (ctx->worker == NULL);
}
// Wait for the worker to exit, then release its handle.
static inline void Thread_Join(THEORAPLAY_THREAD_T thread)
{
WaitForSingleObject(thread, INFINITE);
CloseHandle(thread);
}
static inline int Mutex_Create(TheoraDecoder *ctx)
{
ctx->lock = CreateMutex(NULL, FALSE, NULL);
return (ctx->lock == NULL);
}
static inline void Mutex_Destroy(THEORAPLAY_MUTEX_T mutex)
{
CloseHandle(mutex);
}
static inline void Mutex_Lock(THEORAPLAY_MUTEX_T mutex)
{
WaitForSingleObject(mutex, INFINITE);
}
static inline void Mutex_Unlock(THEORAPLAY_MUTEX_T mutex)
{
ReleaseMutex(mutex);
}
#else
// NOTE(review): THEORAPLAY_MUTEX_T is a plain pthread_mutex_t here (see the
// #define above), so these functions receive a *copy* of the mutex and then
// lock/unlock/destroy that local copy. POSIX defines mutex operations only on
// the original initialized object — as written, Mutex_Lock/Mutex_Unlock
// provide no mutual exclusion. The fix is to pass pthread_mutex_t* (callers
// passing &ctx->lock); the call sites are outside this view, so flagging only.
static inline int Thread_Create(TheoraDecoder *ctx, void *(*routine) (void*))
{
return pthread_create(&ctx->worker, NULL, routine, ctx);
}
static inline void Thread_Join(THEORAPLAY_THREAD_T thread)
{
pthread_join(thread, NULL);
}
static inline int Mutex_Create(TheoraDecoder *ctx)
{
return pthread_mutex_init(&ctx->lock, NULL);
}
static inline void Mutex_Destroy(THEORAPLAY_MUTEX_T mutex)
{
pthread_mutex_destroy(&mutex); // operates on the copy — see note above.
}
static inline void Mutex_Lock(THEORAPLAY_MUTEX_T mutex)
{
pthread_mutex_lock(&mutex); // operates on the copy — see note above.
}
static inline void Mutex_Unlock(THEORAPLAY_MUTEX_T mutex)
{
pthread_mutex_unlock(&mutex); // operates on the copy — see note above.
}
#endif
// Pull the next chunk of raw bytes from the app's i/o callback into the
// Ogg sync layer.  Returns 1 on success, 0 at end-of-stream (a read that
// produced no data), and -1 on error.
static int FeedMoreOggData(THEORAPLAY_Io *io, ogg_sync_state *sync)
{
    const long chunksize = 4096;
    char *dst = ogg_sync_buffer(sync, chunksize);
    if (dst == NULL)
        return -1;  // sync layer couldn't give us a buffer.

    const long br = io->read(io, dst, chunksize);
    if (br <= 0)
        return 0;  // nothing more to read.

    if (ogg_sync_wrote(sync, br) != 0)
        return -1;  // sync layer rejected the write.

    return 1;
} // FeedMoreOggData
// This massive function is where all the effort happens.
// It runs on the worker thread: demux the Ogg container, decode Theora
// video and Vorbis audio, and append results to ctx's linked lists until
// end of stream, a fatal error, or the API thread sets ctx->halt.
static void WorkerThread(TheoraDecoder *ctx)
{
    // make sure we initialized the stream before using pagein, but the stream
    // will know to ignore pages that aren't meant for it, so pass to both.
    #define queue_ogg_page(ctx) do { \
        if (tpackets) ogg_stream_pagein(&tstream, &page); \
        if (vpackets) ogg_stream_pagein(&vstream, &page); \
    } while (0)

    unsigned long audioframes = 0;
    unsigned long videoframes = 0;
    double fps = 0.0;
    int was_error = 1;  // resets to 0 at the end.
    int eos = 0;        // end of stream flag.

    // Too much Ogg/Vorbis/Theora state...
    ogg_packet packet;
    ogg_sync_state sync;
    ogg_page page;
    int vpackets = 0;   // Vorbis header packets seen (0 == no audio stream).
    vorbis_info vinfo;
    vorbis_comment vcomment;
    ogg_stream_state vstream;
    int vdsp_init = 0;
    vorbis_dsp_state vdsp;
    int tpackets = 0;   // Theora header packets seen (0 == no video stream).
    th_info tinfo;
    th_comment tcomment;
    ogg_stream_state tstream;
    int vblock_init = 0;
    vorbis_block vblock;
    th_dec_ctx *tdec = NULL;
    th_setup_info *tsetup = NULL;

    ogg_sync_init(&sync);
    vorbis_info_init(&vinfo);
    vorbis_comment_init(&vcomment);
    th_comment_init(&tcomment);
    th_info_init(&tinfo);

    // Identify the streams: scan beginning-of-stream pages for the first
    // Theora and the first Vorbis stream; everything else is discarded.
    int bos = 1;
    while (!ctx->halt && bos)
    {
        if (FeedMoreOggData(ctx->io, &sync) <= 0)
            goto cleanup;

        // parse out the initial header.
        while ( (!ctx->halt) && (ogg_sync_pageout(&sync, &page) > 0) )
        {
            ogg_stream_state test;

            if (!ogg_page_bos(&page))  // not a header.
            {
                queue_ogg_page(ctx);
                bos = 0;
                break;
            } // if

            ogg_stream_init(&test, ogg_page_serialno(&page));
            ogg_stream_pagein(&test, &page);
            ogg_stream_packetout(&test, &packet);

            if (!tpackets && (th_decode_headerin(&tinfo, &tcomment, &tsetup, &packet) >= 0))
            {
                memcpy(&tstream, &test, sizeof (test));
                tpackets = 1;
            } // if
            else if (!vpackets && (vorbis_synthesis_headerin(&vinfo, &vcomment, &packet) >= 0))
            {
                memcpy(&vstream, &test, sizeof (test));
                vpackets = 1;
            } // else if
            else
            {
                // whatever it is, we don't care about it
                ogg_stream_clear(&test);
            } // else
        } // while
    } // while

    // no audio OR video?
    if (ctx->halt || (!vpackets && !tpackets))
        goto cleanup;

    // apparently there are two more theora and two more vorbis headers next.
    while ((!ctx->halt) && ((tpackets && (tpackets < 3)) || (vpackets && (vpackets < 3))))
    {
        while (!ctx->halt && tpackets && (tpackets < 3))
        {
            if (ogg_stream_packetout(&tstream, &packet) != 1)
                break;  // get more data?
            if (!th_decode_headerin(&tinfo, &tcomment, &tsetup, &packet))
                goto cleanup;
            tpackets++;
        } // while

        while (!ctx->halt && vpackets && (vpackets < 3))
        {
            if (ogg_stream_packetout(&vstream, &packet) != 1)
                break;  // get more data?
            if (vorbis_synthesis_headerin(&vinfo, &vcomment, &packet))
                goto cleanup;
            vpackets++;
        } // while

        // get another page, try again?
        if (ogg_sync_pageout(&sync, &page) > 0)
            queue_ogg_page(ctx);
        else if (FeedMoreOggData(ctx->io, &sync) <= 0)
            goto cleanup;
    } // while

    // okay, now we have our streams, ready to set up decoding.
    if (!ctx->halt && tpackets)
    {
        // th_decode_alloc() docs say to check for insanely large frames yourself.
        if ((tinfo.frame_width > 99999) || (tinfo.frame_height > 99999))
            goto cleanup;

        // We treat "unspecified" as NTSC. *shrug*
        if ( (tinfo.colorspace != TH_CS_UNSPECIFIED) &&
             (tinfo.colorspace != TH_CS_ITU_REC_470M) &&
             (tinfo.colorspace != TH_CS_ITU_REC_470BG) )
        {
            assert(0 && "Unsupported colorspace.");  // !!! FIXME
            goto cleanup;
        } // if

        if (tinfo.pixel_fmt != TH_PF_420) { assert(0); goto cleanup; } // !!! FIXME

        if (tinfo.fps_denominator != 0)
            fps = ((double) tinfo.fps_numerator) / ((double) tinfo.fps_denominator);

        tdec = th_decode_alloc(&tinfo, tsetup);
        if (!tdec) goto cleanup;

        // Set decoder to maximum post-processing level.
        // Theoretically we could try dropping this level if we're not keeping up.
        int pp_level_max = 0;
        // !!! FIXME: maybe an API to set this?
        //th_decode_ctl(tdec, TH_DECCTL_GET_PPLEVEL_MAX, &pp_level_max, sizeof(pp_level_max));
        th_decode_ctl(tdec, TH_DECCTL_SET_PPLEVEL, &pp_level_max, sizeof(pp_level_max));
    } // if

    // Done with this now.
    if (tsetup != NULL)
    {
        th_setup_free(tsetup);
        tsetup = NULL;
    } // if

    if (!ctx->halt && vpackets)
    {
        vdsp_init = (vorbis_synthesis_init(&vdsp, &vinfo) == 0);
        if (!vdsp_init)
            goto cleanup;
        vblock_init = (vorbis_block_init(&vdsp, &vblock) == 0);
        if (!vblock_init)
            goto cleanup;
    } // if

    // Now we can start the actual decoding!
    // Note that audio and video don't _HAVE_ to start simultaneously.
    Mutex_Lock(ctx->lock);
    ctx->prepped = 1;
    ctx->hasvideo = (tpackets != 0);
    ctx->hasaudio = (vpackets != 0);
    Mutex_Unlock(ctx->lock);

    while (!ctx->halt && !eos)
    {
        int need_pages = 0;  // need more Ogg pages?
        int saw_video_frame = 0;

        // Try to read as much audio as we can at once. We limit the outer
        // loop to one video frame and as much audio as we can eat.
        while (!ctx->halt && vpackets)
        {
            float **pcm = NULL;
            const int frames = vorbis_synthesis_pcmout(&vdsp, &pcm);
            if (frames > 0)
            {
                const int channels = vinfo.channels;
                int chanidx, frameidx;
                float *samples;
                AudioPacket *item = (AudioPacket *) malloc(sizeof (AudioPacket));
                if (item == NULL) goto cleanup;
                item->playms = (unsigned long) ((((double) audioframes) / ((double) vinfo.rate)) * 1000.0);
                item->channels = channels;
                item->freq = vinfo.rate;
                item->frames = frames;
                item->samples = (float *) malloc(sizeof (float) * frames * channels);
                item->next = NULL;

                if (item->samples == NULL)
                {
                    free(item);
                    goto cleanup;
                } // if

                // I bet this beats the crap out of the CPU cache...
                // (interleave the planar PCM the decoder handed back.)
                samples = item->samples;
                for (frameidx = 0; frameidx < frames; frameidx++)
                {
                    for (chanidx = 0; chanidx < channels; chanidx++)
                        *(samples++) = pcm[chanidx][frameidx];
                } // for

                vorbis_synthesis_read(&vdsp, frames);  // we ate everything.
                audioframes += frames;

                //printf("Decoded %d frames of audio.\n", (int) frames);
                Mutex_Lock(ctx->lock);
                // NOTE(review): this adds the packet's *start time* (playms),
                // not its duration, so audioms is only a rough buffered-audio
                // figure -- confirm before treating it as "ms of audio queued".
                ctx->audioms += item->playms;
                if (ctx->audiolisttail)
                {
                    assert(ctx->audiolist);
                    ctx->audiolisttail->next = item;
                } // if
                else
                {
                    assert(!ctx->audiolist);
                    ctx->audiolist = item;
                } // else
                ctx->audiolisttail = item;
                Mutex_Unlock(ctx->lock);
            } // if
            else  // no audio available left in current packet?
            {
                // try to feed another packet to the Vorbis stream...
                if (ogg_stream_packetout(&vstream, &packet) <= 0)
                {
                    if (!tpackets)
                        need_pages = 1;  // no video, get more pages now.
                    break;  // we'll get more pages when the video catches up.
                } // if
                else
                {
                    if (vorbis_synthesis(&vblock, &packet) == 0)
                        vorbis_synthesis_blockin(&vdsp, &vblock);
                } // else
            } // else
        } // while

        if (!ctx->halt && tpackets)
        {
            // Theora, according to example_player.c, is
            //  "one [packet] in, one [frame] out."
            if (ogg_stream_packetout(&tstream, &packet) <= 0)
                need_pages = 1;
            else
            {
                ogg_int64_t granulepos = 0;
                const int rc = th_decode_packetin(tdec, &packet, &granulepos);
                if (rc == TH_DUPFRAME)
                    videoframes++;  // nothing else to do.
                else if (rc == 0)  // new frame!
                {
                    th_ycbcr_buffer ycbcr;
                    if (th_decode_ycbcr_out(tdec, ycbcr) == 0)
                    {
                        VideoFrame *item = (VideoFrame *) malloc(sizeof (VideoFrame));
                        if (item == NULL) goto cleanup;
                        item->playms = (fps == 0) ? 0 : (unsigned int) ((((double) videoframes) / fps) * 1000.0);
                        item->fps = fps;
                        item->width = tinfo.pic_width;
                        item->height = tinfo.pic_height;
                        item->format = ctx->vidfmt;
                        item->pixels = ctx->vidcvt(&tinfo, ycbcr);  // repack into app's format.
                        item->next = NULL;

                        if (item->pixels == NULL)
                        {
                            free(item);
                            goto cleanup;
                        } // if

                        //printf("Decoded another video frame.\n");
                        Mutex_Lock(ctx->lock);
                        if (ctx->videolisttail)
                        {
                            assert(ctx->videolist);
                            ctx->videolisttail->next = item;
                        } // if
                        else
                        {
                            assert(!ctx->videolist);
                            ctx->videolist = item;
                        } // else
                        ctx->videolisttail = item;
                        ctx->videocount++;
                        Mutex_Unlock(ctx->lock);

                        saw_video_frame = 1;
                    } // if
                    videoframes++;
                } // if
            } // else
        } // if

        if (!ctx->halt && need_pages)
        {
            const int rc = FeedMoreOggData(ctx->io, &sync);
            if (rc == 0)
                eos = 1;  // end of stream
            else if (rc < 0)
                goto cleanup;  // i/o error, etc.
            else
            {
                while (!ctx->halt && (ogg_sync_pageout(&sync, &page) > 0))
                    queue_ogg_page(ctx);
            } // else
        } // if

        // Sleep the process until we have space for more frames.
        if (saw_video_frame)
        {
            int go_on = !ctx->halt;
            //printf("Sleeping.\n");
            while (go_on)
            {
                // !!! FIXME: This is stupid. I should use a semaphore for this.
                Mutex_Lock(ctx->lock);
                go_on = !ctx->halt && (ctx->videocount >= ctx->maxframes);
                Mutex_Unlock(ctx->lock);
                if (go_on)
                    sleepms(10);
            } // while
            //printf("Awake!\n");
        } // if
    } // while

    was_error = 0;

cleanup:
    ctx->decode_error = (!ctx->halt && was_error);
    if (tdec != NULL) th_decode_free(tdec);
    if (tsetup != NULL) th_setup_free(tsetup);
    if (vblock_init) vorbis_block_clear(&vblock);
    if (vdsp_init) vorbis_dsp_clear(&vdsp);
    if (tpackets) ogg_stream_clear(&tstream);
    if (vpackets) ogg_stream_clear(&vstream);
    th_info_clear(&tinfo);
    th_comment_clear(&tcomment);
    vorbis_comment_clear(&vcomment);
    vorbis_info_clear(&vinfo);
    ogg_sync_clear(&sync);
    ctx->io->close(ctx->io);
    ctx->thread_done = 1;  // API thread polls this via THEORAPLAY_isDecoding().
} // WorkerThread
// Thread entry point shim: unwrap the context and run the decoder loop.
static void *WorkerThreadEntry(void *_this)
{
    WorkerThread((TheoraDecoder *) _this);
    //printf("Worker thread is done.\n");
    return NULL;
} // WorkerThreadEntry
// THEORAPLAY_Io::read backed by an SDL_RWops.  Returns the number of bytes
// read, or -1 when SDL_RWread reports nothing (EOF or error look the same).
static long IoFopenRead(THEORAPLAY_Io *io, void *buf, long buflen)
{
    SDL_RWops *rw = (SDL_RWops *) io->userdata;
    const size_t br = SDL_RWread(rw, buf, 1, buflen);
    return (br == 0) ? -1 : (long) br;
} // IoFopenRead
// THEORAPLAY_Io::close backed by an SDL_RWops: close the stream, then
// release the heap-allocated Io wrapper itself.
static void IoFopenClose(THEORAPLAY_Io *io)
{
    SDL_RWclose((SDL_RWops *) io->userdata);
    free(io);
} // IoFopenClose
// Convenience front-end: open 'fname' through SDL_RWops and hand the
// resulting stream to THEORAPLAY_startDecode().  Returns NULL when the
// wrapper allocation or the file open fails.
THEORAPLAY_Decoder *THEORAPLAY_startDecodeFile(const char *fname,
const unsigned int maxframes,
THEORAPLAY_VideoFormat vidfmt)
{
    THEORAPLAY_Io *io = (THEORAPLAY_Io *) malloc(sizeof (THEORAPLAY_Io));
    if (io == NULL)
        return NULL;

    SDL_RWops *rw = SDL_RWFromFile(fname, "rb");
    if (rw == NULL)
    {
        free(io);
        return NULL;
    } // if

    io->read = IoFopenRead;
    io->close = IoFopenClose;
    io->userdata = rw;
    return THEORAPLAY_startDecode(io, maxframes, vidfmt);
} // THEORAPLAY_startDecodeFile
// Allocate a decoder, pick the pixel converter for 'vidfmt', and spin up
// the worker thread.  Takes ownership of 'io' (it is closed on failure or
// by the worker when decoding ends).  Returns NULL on any failure.
//
// FIX: the original called Mutex_Destroy(ctx->lock) even when Mutex_Create()
// had FAILED, i.e. destroying a mutex that was never initialized (undefined
// behavior with pthread_mutex_destroy).  The mutex is now destroyed only on
// the thread-creation-failure path, where it is known to exist.
THEORAPLAY_Decoder *THEORAPLAY_startDecode(THEORAPLAY_Io *io,
const unsigned int maxframes,
THEORAPLAY_VideoFormat vidfmt)
{
    TheoraDecoder *ctx = NULL;
    ConvertVideoFrameFn vidcvt = NULL;

    switch (vidfmt)
    {
        // !!! FIXME: current expects TH_PF_420.
        #define VIDCVT(t) case THEORAPLAY_VIDFMT_##t: vidcvt = ConvertVideoFrame420To##t; break;
        VIDCVT(YV12)
        VIDCVT(IYUV)
        VIDCVT(RGB)
        VIDCVT(RGBA)
        #undef VIDCVT
        default: goto startdecode_failed;  // invalid/unsupported format.
    } // switch

    ctx = (TheoraDecoder *) malloc(sizeof (TheoraDecoder));
    if (ctx == NULL)
        goto startdecode_failed;

    memset(ctx, '\0', sizeof (TheoraDecoder));
    ctx->maxframes = maxframes;
    ctx->vidfmt = vidfmt;
    ctx->vidcvt = vidcvt;
    ctx->io = io;

    if (Mutex_Create(ctx) == 0)
    {
        ctx->thread_created = (Thread_Create(ctx, WorkerThreadEntry) == 0);
        if (ctx->thread_created)
            return (THEORAPLAY_Decoder *) ctx;

        // Thread creation failed; the mutex was created, so tear it down.
        Mutex_Destroy(ctx->lock);
    } // if
    // If Mutex_Create() failed, ctx->lock was never initialized: don't touch it.

startdecode_failed:
    io->close(io);
    free(ctx);  // free(NULL) is a no-op when we never got past the switch.
    return NULL;
} // THEORAPLAY_startDecode
// Shut down a decoder: signal the worker to halt, join it, destroy the
// lock, then release every frame/packet the app never consumed and the
// decoder itself.  Safe to call with NULL.
void THEORAPLAY_stopDecode(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    if (ctx == NULL)
        return;

    if (ctx->thread_created)
    {
        ctx->halt = 1;             // ask the worker to bail out...
        Thread_Join(ctx->worker);  // ...wait for it to finish...
        Mutex_Destroy(ctx->lock);  // ...then tear down the lock.
    } // if

    // Drain any video frames still queued.
    VideoFrame *vframe = ctx->videolist;
    while (vframe != NULL)
    {
        VideoFrame *vnext = vframe->next;
        free(vframe->pixels);
        free(vframe);
        vframe = vnext;
    } // while

    // Drain any audio packets still queued.
    AudioPacket *apkt = ctx->audiolist;
    while (apkt != NULL)
    {
        AudioPacket *anext = apkt->next;
        free(apkt->samples);
        free(apkt);
        apkt = anext;
    } // while

    free(ctx);
} // THEORAPLAY_stopDecode
// Nonzero while there is still work in flight: buffered audio/video the
// app hasn't drained, or a worker thread that hasn't finished yet.
// A NULL decoder reports 0.
int THEORAPLAY_isDecoding(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    if (ctx == NULL)
        return 0;

    Mutex_Lock(ctx->lock);
    const int busy = (ctx->audiolist || ctx->videolist ||
                      (ctx->thread_created && !ctx->thread_done));
    Mutex_Unlock(ctx->lock);
    return busy;
} // THEORAPLAY_isDecoding
// Expand to a thread-safe getter body: lock, copy ctx->member, unlock.
// Returns 'defval' when the decoder pointer is NULL.
#define GET_SYNCED_VALUE(typ, defval, decoder, member) \
    TheoraDecoder *ctx = (TheoraDecoder *) decoder; \
    typ retval = defval; \
    if (ctx) { \
        Mutex_Lock(ctx->lock); \
        retval = ctx->member; \
        Mutex_Unlock(ctx->lock); \
    } \
    return retval;

// Nonzero once the worker finished parsing headers (has* queries are valid).
int THEORAPLAY_isInitialized(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, prepped);
} // THEORAPLAY_isInitialized

int THEORAPLAY_hasVideoStream(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, hasvideo);
} // THEORAPLAY_hasVideoStream

int THEORAPLAY_hasAudioStream(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, hasaudio);
} // THEORAPLAY_hasAudioStream

// Number of decoded video frames currently buffered and ready to fetch.
unsigned int THEORAPLAY_availableVideo(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(unsigned int, 0, decoder, videocount);
} // THEORAPLAY_availableVideo

unsigned int THEORAPLAY_availableAudio(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(unsigned int, 0, decoder, audioms);
} // THEORAPLAY_availableAudio

// Nonzero if the worker hit a fatal decode or i/o error.
int THEORAPLAY_decodingError(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, decode_error);
} // THEORAPLAY_decodingError
// Dequeue the oldest buffered audio packet, or NULL if none is ready.
// The caller owns the returned packet and must release it with
// THEORAPLAY_freeAudio().
//
// FIX: guard against a NULL decoder before touching ctx->lock, matching
// the NULL tolerance of THEORAPLAY_isDecoding() and the GET_SYNCED_VALUE
// accessors (the original dereferenced ctx unconditionally).
const THEORAPLAY_AudioPacket *THEORAPLAY_getAudio(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    AudioPacket *retval = NULL;

    if (ctx == NULL)
        return NULL;

    Mutex_Lock(ctx->lock);
    retval = ctx->audiolist;
    if (retval)
    {
        ctx->audioms -= retval->playms;   // keep the buffered-audio counter in sync.
        ctx->audiolist = retval->next;    // pop the head of the FIFO.
        retval->next = NULL;
        if (ctx->audiolist == NULL)
            ctx->audiolisttail = NULL;    // list went empty; reset the tail too.
    } // if
    Mutex_Unlock(ctx->lock);

    return retval;
} // THEORAPLAY_getAudio
// Release a packet previously returned by THEORAPLAY_getAudio().
// NULL is accepted and ignored.
void THEORAPLAY_freeAudio(const THEORAPLAY_AudioPacket *_item)
{
    THEORAPLAY_AudioPacket *item = (THEORAPLAY_AudioPacket *) _item;
    if (item == NULL)
        return;

    assert(item->next == NULL);  // must already be unlinked from the FIFO.
    free(item->samples);
    free(item);
} // THEORAPLAY_freeAudio
// Dequeue the oldest buffered video frame, or NULL if none is ready.
// The caller owns the returned frame and must release it with
// THEORAPLAY_freeVideo().  Popping a frame also lets the worker (which
// blocks once 'maxframes' are queued) decode further.
//
// FIX: guard against a NULL decoder before touching ctx->lock, matching
// the NULL tolerance of THEORAPLAY_isDecoding() and the GET_SYNCED_VALUE
// accessors (the original dereferenced ctx unconditionally).
const THEORAPLAY_VideoFrame *THEORAPLAY_getVideo(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    VideoFrame *retval = NULL;

    if (ctx == NULL)
        return NULL;

    Mutex_Lock(ctx->lock);
    retval = ctx->videolist;
    if (retval)
    {
        ctx->videolist = retval->next;    // pop the head of the FIFO.
        retval->next = NULL;
        if (ctx->videolist == NULL)
            ctx->videolisttail = NULL;    // list went empty; reset the tail too.
        assert(ctx->videocount > 0);
        ctx->videocount--;
    } // if
    Mutex_Unlock(ctx->lock);

    return retval;
} // THEORAPLAY_getVideo
// Release a frame previously returned by THEORAPLAY_getVideo().
// NULL is accepted and ignored.
void THEORAPLAY_freeVideo(const THEORAPLAY_VideoFrame *_item)
{
    THEORAPLAY_VideoFrame *item = (THEORAPLAY_VideoFrame *) _item;
    if (item == NULL)
        return;

    assert(item->next == NULL);  // must already be unlinked from the FIFO.
    free(item->pixels);
    free(item);
} // THEORAPLAY_freeVideo
// end of theoraplay.cpp ...

View File

@@ -0,0 +1,85 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
#ifndef _INCL_THEORAPLAY_H_
#define _INCL_THEORAPLAY_H_
#ifdef __cplusplus
extern "C" {
#endif
/* Application-supplied stream interface: TheoraPlay pulls its input bytes
   through these callbacks.  The decoder takes ownership and calls close()
   exactly once when decoding ends (or startup fails). */
typedef struct THEORAPLAY_Io THEORAPLAY_Io;
struct THEORAPLAY_Io
{
    /* Read up to buflen bytes into buf.  Return bytes read; a value <= 0
       is treated as end-of-stream/error by the decoder. */
    long (*read)(THEORAPLAY_Io *io, void *buf, long buflen);
    /* Release the stream and any state hanging off userdata. */
    void (*close)(THEORAPLAY_Io *io);
    void *userdata;  /* opaque pointer for the callbacks' own use. */
};
/* Opaque handle returned by the THEORAPLAY_startDecode* calls. */
typedef struct THEORAPLAY_Decoder THEORAPLAY_Decoder;

/* YV12 is YCrCb, not YCbCr; that's what SDL uses for YV12 overlays. */
typedef enum THEORAPLAY_VideoFormat
{
    THEORAPLAY_VIDFMT_YV12,  /* NTSC colorspace, planar YCrCb 4:2:0 */
    THEORAPLAY_VIDFMT_IYUV,  /* NTSC colorspace, planar YCbCr 4:2:0 */
    THEORAPLAY_VIDFMT_RGB,   /* 24 bits packed pixel RGB */
    THEORAPLAY_VIDFMT_RGBA   /* 32 bits packed pixel RGBA (full alpha). */
} THEORAPLAY_VideoFormat;

/* One decoded video frame.  'next' is used internally for queueing; it is
   always NULL by the time the app receives the frame. */
typedef struct THEORAPLAY_VideoFrame
{
    unsigned int playms;    /* intended presentation time, milliseconds. */
    double fps;
    unsigned int width;
    unsigned int height;
    THEORAPLAY_VideoFormat format;
    unsigned char *pixels;  /* malloc()'d; release via THEORAPLAY_freeVideo(). */
    struct THEORAPLAY_VideoFrame *next;
} THEORAPLAY_VideoFrame;

/* One decoded chunk of interleaved audio. */
typedef struct THEORAPLAY_AudioPacket
{
    unsigned int playms;  /* playback start time in milliseconds. */
    int channels;
    int freq;
    int frames;
    float *samples;  /* frames * channels float32 samples. */
    struct THEORAPLAY_AudioPacket *next;
} THEORAPLAY_AudioPacket;
/* Start decoding the named file.  At most 'maxframes' decoded video frames
   are buffered at a time; returns NULL on failure. */
THEORAPLAY_Decoder *THEORAPLAY_startDecodeFile(const char *fname,
const unsigned int maxframes,
THEORAPLAY_VideoFormat vidfmt);
/* Same, but pulls data through a caller-supplied THEORAPLAY_Io.  The
   decoder takes ownership of 'io' and closes it when done. */
THEORAPLAY_Decoder *THEORAPLAY_startDecode(THEORAPLAY_Io *io,
const unsigned int maxframes,
THEORAPLAY_VideoFormat vidfmt);
/* Halt the worker and free the decoder plus any unconsumed frames/packets. */
void THEORAPLAY_stopDecode(THEORAPLAY_Decoder *decoder);
/* Nonzero while decoding continues or buffered data remains to be fetched. */
int THEORAPLAY_isDecoding(THEORAPLAY_Decoder *decoder);
/* Nonzero if the decoder hit a fatal decode/i-o error. */
int THEORAPLAY_decodingError(THEORAPLAY_Decoder *decoder);
/* Nonzero once stream headers are parsed (hasVideo/hasAudio become valid). */
int THEORAPLAY_isInitialized(THEORAPLAY_Decoder *decoder);
int THEORAPLAY_hasVideoStream(THEORAPLAY_Decoder *decoder);
int THEORAPLAY_hasAudioStream(THEORAPLAY_Decoder *decoder);
/* Count of decoded video frames currently buffered. */
unsigned int THEORAPLAY_availableVideo(THEORAPLAY_Decoder *decoder);
unsigned int THEORAPLAY_availableAudio(THEORAPLAY_Decoder *decoder);
/* Dequeue the next audio packet / video frame (NULL if none is ready);
   the caller must release it with the matching THEORAPLAY_free* call. */
const THEORAPLAY_AudioPacket *THEORAPLAY_getAudio(THEORAPLAY_Decoder *decoder);
void THEORAPLAY_freeAudio(const THEORAPLAY_AudioPacket *item);
const THEORAPLAY_VideoFrame *THEORAPLAY_getVideo(THEORAPLAY_Decoder *decoder);
void THEORAPLAY_freeVideo(const THEORAPLAY_VideoFrame *item);
#ifdef __cplusplus
}
#endif
#endif /* include-once blocker. */
/* end of theoraplay.h ... */

View File

@@ -0,0 +1,74 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
#if !THEORAPLAY_INTERNAL
#error Do not include this in your app. It is used internally by TheoraPlay.
#endif
// Template body for the Y'CbCr 4:2:0 -> packed RGB(A) converters.  The
// including file #defines THEORAPLAY_CVT_FNNAME_420 (the function name)
// and THEORAPLAY_CVT_RGB_ALPHA (1 to emit a fourth 0xFF alpha byte per
// pixel).  Returns a malloc()'d buffer (caller frees) or NULL on failure.
static unsigned char *THEORAPLAY_CVT_FNNAME_420(const th_info *tinfo,
const th_ycbcr_buffer ycbcr)
{
    const int w = tinfo->pic_width;
    const int h = tinfo->pic_height;
    // Always w*h*4 bytes, even in RGB mode where only 3 are written per
    // pixel -- wastes w*h bytes but lets both variants share this code.
    unsigned char *pixels = (unsigned char *) malloc(w * h * 4);

    if (pixels)
    {
        unsigned char *dst = pixels;
        const int ystride = ycbcr[0].stride;
        const int cbstride = ycbcr[1].stride;
        const int crstride = ycbcr[2].stride;
        // Starting offsets that skip the decoder's crop/padding region.
        const int yoff = (tinfo->pic_x & ~1) + ystride * (tinfo->pic_y & ~1);
        const int cboff = (tinfo->pic_x / 2) + (cbstride) * (tinfo->pic_y / 2);
        const unsigned char *py = ycbcr[0].data + yoff;
        const unsigned char *pcb = ycbcr[1].data + cboff;
        const unsigned char *pcr = ycbcr[2].data + cboff;
        int posx, posy;

        for (posy = 0; posy < h; posy++)
        {
            for (posx = 0; posx < w; posx++)
            {
                // http://www.theora.org/doc/Theora.pdf, 1.1 spec,
                // chapter 4.2 (Y'CbCr -> Y'PbPr -> R'G'B')
                // These constants apparently work for NTSC _and_ PAL/SECAM.
                const float yoffset = 16.0f;
                const float yexcursion = 219.0f;
                const float cboffset = 128.0f;
                const float cbexcursion = 224.0f;
                const float croffset = 128.0f;
                const float crexcursion = 224.0f;
                const float kr = 0.299f;
                const float kb = 0.114f;

                // Chroma is half horizontal resolution: posx/2 indexing.
                const float y = (((float) py[posx]) - yoffset) / yexcursion;
                const float pb = (((float) pcb[posx / 2]) - cboffset) / cbexcursion;
                const float pr = (((float) pcr[posx / 2]) - croffset) / crexcursion;
                const float r = (y + (2.0f * (1.0f - kr) * pr)) * 255.0f;
                const float g = (y - ((2.0f * (((1.0f - kb) * kb) / ((1.0f - kb) - kr))) * pb) - ((2.0f * (((1.0f - kr) * kr) / ((1.0f - kb) - kr))) * pr)) * 255.0f;
                const float b = (y + (2.0f * (1.0f - kb) * pb)) * 255.0f;

                // Clamp to [0,255] before narrowing to a byte.
                *(dst++) = (unsigned char) ((r < 0.0f) ? 0.0f : (r > 255.0f) ? 255.0f : r);
                *(dst++) = (unsigned char) ((g < 0.0f) ? 0.0f : (g > 255.0f) ? 255.0f : g);
                *(dst++) = (unsigned char) ((b < 0.0f) ? 0.0f : (b > 255.0f) ? 255.0f : b);

                #if THEORAPLAY_CVT_RGB_ALPHA
                *(dst++) = 0xFF;
                #endif
            } // for

            // adjust to the start of the next line.
            py += ystride;
            // Chroma is half vertical resolution: advance only after odd rows.
            pcb += cbstride * (posy % 2);
            pcr += crstride * (posy % 2);
        } // for
    } // if

    return pixels;
} // THEORAPLAY_CVT_FNNAME_420
// end of theoraplay_cvtrgb.h ...

View File

@@ -0,0 +1,822 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
// I wrote this with a lot of peeking at the Theora example code in
// libtheora-1.1.1/examples/player_example.c, but this is all my own
// code.
#include "../rc_os_defines.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include "SDL.h"
#ifdef _WIN32
#include <windows.h>
#define THEORAPLAY_THREAD_T HANDLE
#define THEORAPLAY_MUTEX_T HANDLE
#define sleepms(x) Sleep(x)
#else
#include <pthread.h>
#include <unistd.h>
#define sleepms(x) usleep((x) * 1000)
#define THEORAPLAY_THREAD_T pthread_t
#define THEORAPLAY_MUTEX_T pthread_mutex_t
#endif
#include "theoraplay.h"
#include "theora/theoradec.h"
#include "ivorbiscodec.h"
#define THEORAPLAY_INTERNAL 1
// Shorthand aliases for the public frame/packet structs.
typedef THEORAPLAY_VideoFrame VideoFrame;
typedef THEORAPLAY_AudioPacket AudioPacket;
// !!! FIXME: these all count on the pixel format being TH_PF_420 for now.
// Converter hook: repacks a decoded Y'CbCr buffer into a malloc()'d buffer
// in the requested output format; returns NULL on allocation failure.
typedef unsigned char *(*ConvertVideoFrameFn)(const th_info *tinfo,
const th_ycbcr_buffer ycbcr);
// Repack a decoded 4:2:0 Y'CbCr buffer into one contiguous planar buffer.
// p0/p1/p2 select the plane order (0=Y', 1=Cb, 2=Cr), so the same code
// serves both YV12 (Y,Cr,Cb) and IYUV (Y,Cb,Cr).  Returns a malloc()'d
// buffer (caller frees) or NULL on allocation failure.
static unsigned char *ConvertVideoFrame420ToYUVPlanar(
const th_info *tinfo, const th_ycbcr_buffer ycbcr,
const int p0, const int p1, const int p2)
{
    const int w = tinfo->pic_width;
    const int h = tinfo->pic_height;
    // Offsets that skip the decoder's crop/padding region in each plane.
    const int lumaoff = (tinfo->pic_x & ~1) + ycbcr[0].stride * (tinfo->pic_y & ~1);
    const int chromaoff = (tinfo->pic_x / 2) + (ycbcr[1].stride) * (tinfo->pic_y / 2);
    unsigned char *yuv = (unsigned char *) malloc(w * h * 2);
    if (yuv == NULL)
        return NULL;

    unsigned char *dst = yuv;
    int row;

    // Full-resolution plane first...
    for (row = 0; row < h; row++, dst += w)
        memcpy(dst, ycbcr[p0].data + lumaoff + ycbcr[p0].stride * row, w);

    // ...then the two half-resolution chroma planes.
    for (row = 0; row < (h / 2); row++, dst += w/2)
        memcpy(dst, ycbcr[p1].data + chromaoff + ycbcr[p1].stride * row, w / 2);
    for (row = 0; row < (h / 2); row++, dst += w/2)
        memcpy(dst, ycbcr[p2].data + chromaoff + ycbcr[p2].stride * row, w / 2);

    return yuv;
} // ConvertVideoFrame420ToYUVPlanar
// YV12: full Y' plane, then Cr, then Cb (plane order 0,2,1).
static unsigned char *ConvertVideoFrame420ToYV12(const th_info *tinfo,
const th_ycbcr_buffer ycbcr)
{
    return ConvertVideoFrame420ToYUVPlanar(tinfo, ycbcr, 0, 2, 1);
} // ConvertVideoFrame420ToYV12

// IYUV: full Y' plane, then Cb, then Cr (plane order 0,1,2).
static unsigned char *ConvertVideoFrame420ToIYUV(const th_info *tinfo,
const th_ycbcr_buffer ycbcr)
{
    return ConvertVideoFrame420ToYUVPlanar(tinfo, ycbcr, 0, 1, 2);
} // ConvertVideoFrame420ToIYUV
// RGB
#define THEORAPLAY_CVT_FNNAME_420 ConvertVideoFrame420ToRGB
#define THEORAPLAY_CVT_RGB_ALPHA 0
#include "theoraplay_cvtrgb.h"
#undef THEORAPLAY_CVT_RGB_ALPHA
#undef THEORAPLAY_CVT_FNNAME_420
// RGBA
#define THEORAPLAY_CVT_FNNAME_420 ConvertVideoFrame420ToRGBA
#define THEORAPLAY_CVT_RGB_ALPHA 1
#include "theoraplay_cvtrgb.h"
#undef THEORAPLAY_CVT_RGB_ALPHA
#undef THEORAPLAY_CVT_FNNAME_420
// Internal decoder state (Android/Tremor build).  One background worker
// thread decodes the Ogg stream and appends results to the linked lists
// below; the public API drains them.  'lock' guards the shared fields.
typedef struct TheoraDecoder
{
    // Thread wrangling...
    int thread_created;        // nonzero once the worker thread was spawned.
    THEORAPLAY_MUTEX_T lock;
    volatile int halt;         // API side sets this to ask the worker to quit.
    int thread_done;           // worker sets this just before exiting.
    THEORAPLAY_THREAD_T worker;

    // API state...
    THEORAPLAY_Io *io;         // input stream; closed by the worker on exit.
    unsigned int maxframes;    // Max video frames to buffer.
    volatile unsigned int prepped;     // headers parsed; has* fields are valid.
    volatile unsigned int videocount;  // currently buffered frames.
    volatile unsigned int audioms;     // buffered-audio figure in milliseconds
                                       // (sum of queued packets' playms values).
    volatile int hasvideo;
    volatile int hasaudio;
    volatile int decode_error;  // worker hit a fatal error before end of stream.
    THEORAPLAY_VideoFormat vidfmt;  // output pixel format the app requested.
    ConvertVideoFrameFn vidcvt;     // converter function matching vidfmt.
    VideoFrame *videolist;     // FIFO of decoded frames (head == oldest).
    VideoFrame *videolisttail;
    AudioPacket *audiolist;    // FIFO of decoded audio packets.
    AudioPacket *audiolisttail;
} TheoraDecoder;
#ifdef _WIN32
// Win32 flavor of the tiny thread/mutex portability layer.
// All *_Create functions return 0 on success, nonzero on failure.
static inline int Thread_Create(TheoraDecoder *ctx, void *(*routine) (void*))
{
    ctx->worker = CreateThread(
        NULL,
        0,
        (LPTHREAD_START_ROUTINE) routine,
        (LPVOID) ctx,
        0,
        NULL
    );
    return (ctx->worker == NULL);
}

static inline void Thread_Join(THEORAPLAY_THREAD_T thread)
{
    WaitForSingleObject(thread, INFINITE);
    CloseHandle(thread);
}

static inline int Mutex_Create(TheoraDecoder *ctx)
{
    ctx->lock = CreateMutex(NULL, FALSE, NULL);
    return (ctx->lock == NULL);
}

static inline void Mutex_Destroy(THEORAPLAY_MUTEX_T mutex)
{
    CloseHandle(mutex);
}

static inline void Mutex_Lock(THEORAPLAY_MUTEX_T mutex)
{
    WaitForSingleObject(mutex, INFINITE);
}

static inline void Mutex_Unlock(THEORAPLAY_MUTEX_T mutex)
{
    ReleaseMutex(mutex);
}
#else
// pthreads flavor; pthread_* return 0 on success, matching the convention above.
static inline int Thread_Create(TheoraDecoder *ctx, void *(*routine) (void*))
{
    return pthread_create(&ctx->worker, NULL, routine, ctx);
}

static inline void Thread_Join(THEORAPLAY_THREAD_T thread)
{
    pthread_join(thread, NULL);
}

static inline int Mutex_Create(TheoraDecoder *ctx)
{
    return pthread_mutex_init(&ctx->lock, NULL);
}

// NOTE(review): the three functions below receive the pthread_mutex_t BY
// VALUE and then operate on the local copy's address.  POSIX does not
// define operating on a copy of a mutex, so whether Mutex_Lock actually
// serializes callers depends on the platform's mutex representation.
// Passing THEORAPLAY_MUTEX_T by pointer would be safer, but that changes
// the signature shared with the Win32 branch -- confirm before touching.
static inline void Mutex_Destroy(THEORAPLAY_MUTEX_T mutex)
{
    pthread_mutex_destroy(&mutex);
}

static inline void Mutex_Lock(THEORAPLAY_MUTEX_T mutex)
{
    pthread_mutex_lock(&mutex);
}

static inline void Mutex_Unlock(THEORAPLAY_MUTEX_T mutex)
{
    pthread_mutex_unlock(&mutex);
}
#endif
// Pull the next chunk of raw bytes from the app's i/o callback into the
// Ogg sync layer.  Returns 1 on success, 0 at end-of-stream (a read that
// produced no data), and -1 on error.
static int FeedMoreOggData(THEORAPLAY_Io *io, ogg_sync_state *sync)
{
    const long chunksize = 4096;
    char *dst = ogg_sync_buffer(sync, chunksize);
    if (dst == NULL)
        return -1;  // sync layer couldn't give us a buffer.

    const long br = io->read(io, dst, chunksize);
    if (br <= 0)
        return 0;  // nothing more to read.

    if (ogg_sync_wrote(sync, br) != 0)
        return -1;  // sync layer rejected the write.

    return 1;
} // FeedMoreOggData
// This massive function is where all the effort happens.
// It runs on the worker thread: demux the Ogg container, decode Theora
// video and (via Tremor, the integer Vorbis decoder -- note the
// ogg_int32_t PCM below) audio, and append results to ctx's linked lists
// until end of stream, a fatal error, or the API thread sets ctx->halt.
static void WorkerThread(TheoraDecoder *ctx)
{
    // make sure we initialized the stream before using pagein, but the stream
    // will know to ignore pages that aren't meant for it, so pass to both.
    #define queue_ogg_page(ctx) do { \
        if (tpackets) ogg_stream_pagein(&tstream, &page); \
        if (vpackets) ogg_stream_pagein(&vstream, &page); \
    } while (0)

    unsigned long audioframes = 0;
    unsigned long videoframes = 0;
    double fps = 0.0;
    int was_error = 1;  // resets to 0 at the end.
    int eos = 0;        // end of stream flag.

    // Too much Ogg/Vorbis/Theora state...
    ogg_packet packet;
    ogg_sync_state sync;
    ogg_page page;
    int vpackets = 0;   // Vorbis header packets seen (0 == no audio stream).
    vorbis_info vinfo;
    vorbis_comment vcomment;
    ogg_stream_state vstream;
    int vdsp_init = 0;
    vorbis_dsp_state vdsp;
    int tpackets = 0;   // Theora header packets seen (0 == no video stream).
    th_info tinfo;
    th_comment tcomment;
    ogg_stream_state tstream;
    int vblock_init = 0;
    vorbis_block vblock;
    th_dec_ctx *tdec = NULL;
    th_setup_info *tsetup = NULL;

    ogg_sync_init(&sync);
    vorbis_info_init(&vinfo);
    vorbis_comment_init(&vcomment);
    th_comment_init(&tcomment);
    th_info_init(&tinfo);

    // Identify the streams: scan beginning-of-stream pages for the first
    // Theora and the first Vorbis stream; everything else is discarded.
    int bos = 1;
    while (!ctx->halt && bos)
    {
        if (FeedMoreOggData(ctx->io, &sync) <= 0)
            goto cleanup;

        // parse out the initial header.
        while ( (!ctx->halt) && (ogg_sync_pageout(&sync, &page) > 0) )
        {
            ogg_stream_state test;

            if (!ogg_page_bos(&page))  // not a header.
            {
                queue_ogg_page(ctx);
                bos = 0;
                break;
            } // if

            ogg_stream_init(&test, ogg_page_serialno(&page));
            ogg_stream_pagein(&test, &page);
            ogg_stream_packetout(&test, &packet);

            if (!tpackets && (th_decode_headerin(&tinfo, &tcomment, &tsetup, &packet) >= 0))
            {
                memcpy(&tstream, &test, sizeof (test));
                tpackets = 1;
            } // if
            else if (!vpackets && (vorbis_synthesis_headerin(&vinfo, &vcomment, &packet) >= 0))
            {
                memcpy(&vstream, &test, sizeof (test));
                vpackets = 1;
            } // else if
            else
            {
                // whatever it is, we don't care about it
                ogg_stream_clear(&test);
            } // else
        } // while
    } // while

    // no audio OR video?
    if (ctx->halt || (!vpackets && !tpackets))
        goto cleanup;

    // apparently there are two more theora and two more vorbis headers next.
    while ((!ctx->halt) && ((tpackets && (tpackets < 3)) || (vpackets && (vpackets < 3))))
    {
        while (!ctx->halt && tpackets && (tpackets < 3))
        {
            if (ogg_stream_packetout(&tstream, &packet) != 1)
                break;  // get more data?
            if (!th_decode_headerin(&tinfo, &tcomment, &tsetup, &packet))
                goto cleanup;
            tpackets++;
        } // while

        while (!ctx->halt && vpackets && (vpackets < 3))
        {
            if (ogg_stream_packetout(&vstream, &packet) != 1)
                break;  // get more data?
            if (vorbis_synthesis_headerin(&vinfo, &vcomment, &packet))
                goto cleanup;
            vpackets++;
        } // while

        // get another page, try again?
        if (ogg_sync_pageout(&sync, &page) > 0)
            queue_ogg_page(ctx);
        else if (FeedMoreOggData(ctx->io, &sync) <= 0)
            goto cleanup;
    } // while

    // okay, now we have our streams, ready to set up decoding.
    if (!ctx->halt && tpackets)
    {
        // th_decode_alloc() docs say to check for insanely large frames yourself.
        if ((tinfo.frame_width > 99999) || (tinfo.frame_height > 99999))
            goto cleanup;

        // We treat "unspecified" as NTSC. *shrug*
        if ( (tinfo.colorspace != TH_CS_UNSPECIFIED) &&
             (tinfo.colorspace != TH_CS_ITU_REC_470M) &&
             (tinfo.colorspace != TH_CS_ITU_REC_470BG) )
        {
            assert(0 && "Unsupported colorspace.");  // !!! FIXME
            goto cleanup;
        } // if

        if (tinfo.pixel_fmt != TH_PF_420) { assert(0); goto cleanup; } // !!! FIXME

        if (tinfo.fps_denominator != 0)
            fps = ((double) tinfo.fps_numerator) / ((double) tinfo.fps_denominator);

        tdec = th_decode_alloc(&tinfo, tsetup);
        if (!tdec) goto cleanup;

        // Set decoder to maximum post-processing level.
        // Theoretically we could try dropping this level if we're not keeping up.
        int pp_level_max = 0;
        // !!! FIXME: maybe an API to set this?
        //th_decode_ctl(tdec, TH_DECCTL_GET_PPLEVEL_MAX, &pp_level_max, sizeof(pp_level_max));
        th_decode_ctl(tdec, TH_DECCTL_SET_PPLEVEL, &pp_level_max, sizeof(pp_level_max));
    } // if

    // Done with this now.
    if (tsetup != NULL)
    {
        th_setup_free(tsetup);
        tsetup = NULL;
    } // if

    if (!ctx->halt && vpackets)
    {
        vdsp_init = (vorbis_synthesis_init(&vdsp, &vinfo) == 0);
        if (!vdsp_init)
            goto cleanup;
        vblock_init = (vorbis_block_init(&vdsp, &vblock) == 0);
        if (!vblock_init)
            goto cleanup;
    } // if

    // Now we can start the actual decoding!
    // Note that audio and video don't _HAVE_ to start simultaneously.
    Mutex_Lock(ctx->lock);
    ctx->prepped = 1;
    ctx->hasvideo = (tpackets != 0);
    ctx->hasaudio = (vpackets != 0);
    Mutex_Unlock(ctx->lock);

    while (!ctx->halt && !eos)
    {
        int need_pages = 0;  // need more Ogg pages?
        int saw_video_frame = 0;

        // Try to read as much audio as we can at once. We limit the outer
        // loop to one video frame and as much audio as we can eat.
        while (!ctx->halt && vpackets)
        {
            // Tremor hands back fixed-point ogg_int32_t PCM, not floats.
            ogg_int32_t **pcm = NULL;
            const int frames = vorbis_synthesis_pcmout(&vdsp, &pcm);
            if (frames > 0)
            {
                const int channels = vinfo.channels;
                int chanidx, frameidx;
                ogg_int32_t *samples;
                AudioPacket *item = (AudioPacket *) malloc(sizeof (AudioPacket));
                if (item == NULL) goto cleanup;
                item->playms = (unsigned long) ((((double) audioframes) / ((double) vinfo.rate)) * 1000.0);
                item->channels = channels;
                item->freq = vinfo.rate;
                item->frames = frames;
                // NOTE(review): ogg_int32_t samples are stored into
                // item->samples here, but the desktop theoraplay.h declares
                // that field as float*.  Confirm the Android build pairs this
                // file with a header whose samples field is ogg_int32_t*.
                item->samples = (ogg_int32_t *) malloc(sizeof (ogg_int32_t) * frames * channels);
                item->next = NULL;

                if (item->samples == NULL)
                {
                    free(item);
                    goto cleanup;
                } // if

                // I bet this beats the crap out of the CPU cache...
                // (interleave the planar PCM the decoder handed back.)
                samples = item->samples;
                for (frameidx = 0; frameidx < frames; frameidx++)
                {
                    for (chanidx = 0; chanidx < channels; chanidx++)
                        *(samples++) = pcm[chanidx][frameidx];
                } // for

                vorbis_synthesis_read(&vdsp, frames);  // we ate everything.
                audioframes += frames;

                //printf("Decoded %d frames of audio.\n", (int) frames);
                Mutex_Lock(ctx->lock);
                // NOTE(review): this adds the packet's *start time* (playms),
                // not its duration, so audioms is only a rough buffered-audio
                // figure -- confirm before treating it as "ms of audio queued".
                ctx->audioms += item->playms;
                if (ctx->audiolisttail)
                {
                    assert(ctx->audiolist);
                    ctx->audiolisttail->next = item;
                } // if
                else
                {
                    assert(!ctx->audiolist);
                    ctx->audiolist = item;
                } // else
                ctx->audiolisttail = item;
                Mutex_Unlock(ctx->lock);
            } // if
            else  // no audio available left in current packet?
            {
                // try to feed another packet to the Vorbis stream...
                if (ogg_stream_packetout(&vstream, &packet) <= 0)
                {
                    if (!tpackets)
                        need_pages = 1;  // no video, get more pages now.
                    break;  // we'll get more pages when the video catches up.
                } // if
                else
                {
                    if (vorbis_synthesis(&vblock, &packet) == 0)
                        vorbis_synthesis_blockin(&vdsp, &vblock);
                } // else
            } // else
        } // while

        if (!ctx->halt && tpackets)
        {
            // Theora, according to example_player.c, is
            //  "one [packet] in, one [frame] out."
            if (ogg_stream_packetout(&tstream, &packet) <= 0)
                need_pages = 1;
            else
            {
                ogg_int64_t granulepos = 0;
                const int rc = th_decode_packetin(tdec, &packet, &granulepos);
                if (rc == TH_DUPFRAME)
                    videoframes++;  // nothing else to do.
                else if (rc == 0)  // new frame!
                {
                    th_ycbcr_buffer ycbcr;
                    if (th_decode_ycbcr_out(tdec, ycbcr) == 0)
                    {
                        VideoFrame *item = (VideoFrame *) malloc(sizeof (VideoFrame));
                        if (item == NULL) goto cleanup;
                        item->playms = (fps == 0) ? 0 : (unsigned int) ((((double) videoframes) / fps) * 1000.0);
                        item->fps = fps;
                        item->width = tinfo.pic_width;
                        item->height = tinfo.pic_height;
                        item->format = ctx->vidfmt;
                        item->pixels = ctx->vidcvt(&tinfo, ycbcr);  // repack into app's format.
                        item->next = NULL;

                        if (item->pixels == NULL)
                        {
                            free(item);
                            goto cleanup;
                        } // if

                        //printf("Decoded another video frame.\n");
                        Mutex_Lock(ctx->lock);
                        if (ctx->videolisttail)
                        {
                            assert(ctx->videolist);
                            ctx->videolisttail->next = item;
                        } // if
                        else
                        {
                            assert(!ctx->videolist);
                            ctx->videolist = item;
                        } // else
                        ctx->videolisttail = item;
                        ctx->videocount++;
                        Mutex_Unlock(ctx->lock);

                        saw_video_frame = 1;
                    } // if
                    videoframes++;
                } // if
            } // else
        } // if

        if (!ctx->halt && need_pages)
        {
            const int rc = FeedMoreOggData(ctx->io, &sync);
            if (rc == 0)
                eos = 1;  // end of stream
            else if (rc < 0)
                goto cleanup;  // i/o error, etc.
            else
            {
                while (!ctx->halt && (ogg_sync_pageout(&sync, &page) > 0))
                    queue_ogg_page(ctx);
            } // else
        } // if

        // Sleep the process until we have space for more frames.
        if (saw_video_frame)
        {
            int go_on = !ctx->halt;
            //printf("Sleeping.\n");
            while (go_on)
            {
                // !!! FIXME: This is stupid. I should use a semaphore for this.
                Mutex_Lock(ctx->lock);
                go_on = !ctx->halt && (ctx->videocount >= ctx->maxframes);
                Mutex_Unlock(ctx->lock);
                if (go_on)
                    sleepms(10);
            } // while
            //printf("Awake!\n");
        } // if
    } // while

    was_error = 0;

cleanup:
    ctx->decode_error = (!ctx->halt && was_error);
    if (tdec != NULL) th_decode_free(tdec);
    if (tsetup != NULL) th_setup_free(tsetup);
    if (vblock_init) vorbis_block_clear(&vblock);
    if (vdsp_init) vorbis_dsp_clear(&vdsp);
    if (tpackets) ogg_stream_clear(&tstream);
    if (vpackets) ogg_stream_clear(&vstream);
    th_info_clear(&tinfo);
    th_comment_clear(&tcomment);
    vorbis_comment_clear(&vcomment);
    vorbis_info_clear(&vinfo);
    ogg_sync_clear(&sync);
    ctx->io->close(ctx->io);
    ctx->thread_done = 1;  // API thread polls this via THEORAPLAY_isDecoding().
} // WorkerThread
// Thread entry point: unwrap the opaque argument and run the decode loop.
static void *WorkerThreadEntry(void *_this)
{
    WorkerThread((TheoraDecoder *) _this);
    return NULL;
} // WorkerThreadEntry
// THEORAPLAY_Io read callback backed by an SDL_RWops stream.
// Returns the number of bytes read, or -1 on EOF/error (SDL_RWread == 0).
static long IoFopenRead(THEORAPLAY_Io *io, void *buf, long buflen)
{
    SDL_RWops *rwops = (SDL_RWops *) io->userdata;
    const size_t br = SDL_RWread(rwops, buf, 1, buflen);
    return (br == 0) ? -1 : (long) br;
} // IoFopenRead
// THEORAPLAY_Io close callback: closes the SDL_RWops and releases the
// heap-allocated Io wrapper created by THEORAPLAY_startDecodeFile().
static void IoFopenClose(THEORAPLAY_Io *io)
{
    SDL_RWclose((SDL_RWops *) io->userdata);
    free(io);
} // IoFopenClose
// Convenience wrapper: wrap a file on disk in a THEORAPLAY_Io (backed by
// SDL_RWops) and hand it to THEORAPLAY_startDecode().
// Returns NULL if allocation or the file open fails.
THEORAPLAY_Decoder *THEORAPLAY_startDecodeFile(const char *fname,
                                               const unsigned int maxframes,
                                               THEORAPLAY_VideoFormat vidfmt)
{
    THEORAPLAY_Io *io = (THEORAPLAY_Io *) malloc(sizeof (THEORAPLAY_Io));
    if (io == NULL)
        return NULL;

    SDL_RWops *rwops = SDL_RWFromFile(fname, "rb");
    if (rwops == NULL)
    {
        free(io);
        return NULL;
    } // if

    io->read = IoFopenRead;
    io->close = IoFopenClose;
    io->userdata = rwops;
    return THEORAPLAY_startDecode(io, maxframes, vidfmt);
} // THEORAPLAY_startDecodeFile
// Start a decoder: validate the requested pixel format, allocate and zero
// the decoder state, create its lock, and spin up the worker thread.
// The decoder takes ownership of `io`; on any failure `io` is closed and
// NULL is returned.
THEORAPLAY_Decoder *THEORAPLAY_startDecode(THEORAPLAY_Io *io,
                                           const unsigned int maxframes,
                                           THEORAPLAY_VideoFormat vidfmt)
{
    TheoraDecoder *ctx = NULL;
    ConvertVideoFrameFn vidcvt = NULL;

    switch (vidfmt)
    {
        // !!! FIXME: current expects TH_PF_420.
        #define VIDCVT(t) case THEORAPLAY_VIDFMT_##t: vidcvt = ConvertVideoFrame420To##t; break;
        VIDCVT(YV12)
        VIDCVT(IYUV)
        VIDCVT(RGB)
        VIDCVT(RGBA)
        #undef VIDCVT
        default: goto startdecode_failed;  // invalid/unsupported format.
    } // switch

    ctx = (TheoraDecoder *) malloc(sizeof (TheoraDecoder));
    if (ctx == NULL)
        goto startdecode_failed;

    memset(ctx, '\0', sizeof (TheoraDecoder));
    ctx->maxframes = maxframes;
    ctx->vidfmt = vidfmt;
    ctx->vidcvt = vidcvt;
    ctx->io = io;

    if (Mutex_Create(ctx) == 0)
    {
        ctx->thread_created = (Thread_Create(ctx, WorkerThreadEntry) == 0);
        if (ctx->thread_created)
            return (THEORAPLAY_Decoder *) ctx;

        // Thread creation failed: tear down the mutex we just made.
        // (Bug fix: this used to run even when Mutex_Create() itself
        // failed, destroying a mutex that was never created — undefined
        // behavior with pthread mutexes.)
        Mutex_Destroy(ctx->lock);
    } // if

startdecode_failed:
    io->close(io);
    free(ctx);
    return NULL;
} // THEORAPLAY_startDecode
// Halt the decoder, join its worker thread, and release every queued video
// frame and audio packet along with the decoder itself. NULL is a no-op.
void THEORAPLAY_stopDecode(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    if (ctx == NULL)
        return;

    if (ctx->thread_created)
    {
        ctx->halt = 1;            // signal the worker loop to wind down.
        Thread_Join(ctx->worker);
        Mutex_Destroy(ctx->lock);
    } // if

    // Drain any video frames the app never consumed.
    VideoFrame *vframe = ctx->videolist;
    while (vframe != NULL)
    {
        VideoFrame *vnext = vframe->next;
        free(vframe->pixels);
        free(vframe);
        vframe = vnext;
    } // while

    // ...and any undelivered audio packets.
    AudioPacket *apkt = ctx->audiolist;
    while (apkt != NULL)
    {
        AudioPacket *anext = apkt->next;
        free(apkt->samples);
        free(apkt);
        apkt = anext;
    } // while

    free(ctx);
} // THEORAPLAY_stopDecode
// Returns non-zero while the decoder still has queued data or a running
// worker thread; zero once everything has been consumed (or for NULL).
int THEORAPLAY_isDecoding(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    int retval = 0;
    if (ctx)
    {
        Mutex_Lock(ctx->lock);
        // Still "decoding" if data is queued or the worker hasn't finished.
        // (Removed a redundant re-check of `ctx` that was inside the lock.)
        retval = (ctx->audiolist || ctx->videolist ||
                  (ctx->thread_created && !ctx->thread_done));
        Mutex_Unlock(ctx->lock);
    } // if
    return retval;
} // THEORAPLAY_isDecoding
// Boilerplate for the trivial thread-safe accessors below: cast the opaque
// decoder, lock it, copy one struct member into `retval`, unlock, and
// return it. A NULL decoder yields `defval`. Note this expands to the
// *entire* function body, including the return statement.
#define GET_SYNCED_VALUE(typ, defval, decoder, member) \
    TheoraDecoder *ctx = (TheoraDecoder *) decoder; \
    typ retval = defval; \
    if (ctx) { \
        Mutex_Lock(ctx->lock); \
        retval = ctx->member; \
        Mutex_Unlock(ctx->lock); \
    } \
    return retval;
// Non-zero once the worker has prepared the decoder (ctx->prepped —
// presumably set after stream-header parsing; flag is set outside this view).
int THEORAPLAY_isInitialized(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, prepped);
} // THEORAPLAY_isInitialized
// Non-zero if the source contains a Theora video stream (ctx->hasvideo).
int THEORAPLAY_hasVideoStream(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, hasvideo);
} // THEORAPLAY_hasVideoStream
// Non-zero if the source contains a Vorbis audio stream (ctx->hasaudio).
int THEORAPLAY_hasAudioStream(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, hasaudio);
} // THEORAPLAY_hasAudioStream
// Number of decoded video frames currently queued for the app.
unsigned int THEORAPLAY_availableVideo(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(unsigned int, 0, decoder, videocount);
} // THEORAPLAY_availableVideo
// MILLISECONDS of decoded audio currently queued (ctx->audioms is
// accumulated from each packet's playms), not a packet count.
unsigned int THEORAPLAY_availableAudio(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(unsigned int, 0, decoder, audioms);
} // THEORAPLAY_availableAudio
// Non-zero if the worker thread stopped because of a decode/i-o error
// (set in WorkerThread's cleanup path, excluding deliberate halts).
int THEORAPLAY_decodingError(THEORAPLAY_Decoder *decoder)
{
    GET_SYNCED_VALUE(int, 0, decoder, decode_error);
} // THEORAPLAY_decodingError
// Pop the oldest queued audio packet, or NULL if none (or if decoder is
// NULL). Ownership transfers to the caller, who must release it with
// THEORAPLAY_freeAudio().
const THEORAPLAY_AudioPacket *THEORAPLAY_getAudio(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    AudioPacket *retval = NULL;

    // Bug fix: every other public entry point tolerates a NULL decoder;
    // this one dereferenced it unconditionally.
    if (ctx == NULL)
        return NULL;

    Mutex_Lock(ctx->lock);
    retval = ctx->audiolist;
    if (retval)
    {
        ctx->audioms -= retval->playms;  // this packet no longer counts as queued.
        ctx->audiolist = retval->next;
        retval->next = NULL;
        if (ctx->audiolist == NULL)
            ctx->audiolisttail = NULL;   // queue is now empty.
    } // if
    Mutex_Unlock(ctx->lock);

    return retval;
} // THEORAPLAY_getAudio
// Release a packet previously returned by THEORAPLAY_getAudio().
// NULL is a no-op; the packet must already be unlinked (next == NULL).
void THEORAPLAY_freeAudio(const THEORAPLAY_AudioPacket *_item)
{
    THEORAPLAY_AudioPacket *pkt = (THEORAPLAY_AudioPacket *) _item;
    if (pkt == NULL)
        return;
    assert(pkt->next == NULL);
    free(pkt->samples);
    free(pkt);
} // THEORAPLAY_freeAudio
// Pop the oldest queued video frame, or NULL if none (or if decoder is
// NULL). Ownership transfers to the caller, who must release it with
// THEORAPLAY_freeVideo().
const THEORAPLAY_VideoFrame *THEORAPLAY_getVideo(THEORAPLAY_Decoder *decoder)
{
    TheoraDecoder *ctx = (TheoraDecoder *) decoder;
    VideoFrame *retval = NULL;

    // Bug fix: every other public entry point tolerates a NULL decoder;
    // this one dereferenced it unconditionally.
    if (ctx == NULL)
        return NULL;

    Mutex_Lock(ctx->lock);
    retval = ctx->videolist;
    if (retval)
    {
        ctx->videolist = retval->next;
        retval->next = NULL;
        if (ctx->videolist == NULL)
            ctx->videolisttail = NULL;   // queue is now empty.
        assert(ctx->videocount > 0);
        ctx->videocount--;               // frame count tracks the list length.
    } // if
    Mutex_Unlock(ctx->lock);

    return retval;
} // THEORAPLAY_getVideo
// Release a frame previously returned by THEORAPLAY_getVideo().
// NULL is a no-op; the frame must already be unlinked (next == NULL).
void THEORAPLAY_freeVideo(const THEORAPLAY_VideoFrame *_item)
{
    THEORAPLAY_VideoFrame *frame = (THEORAPLAY_VideoFrame *) _item;
    if (frame == NULL)
        return;
    assert(frame->next == NULL);
    free(frame->pixels);
    free(frame);
} // THEORAPLAY_freeVideo
// end of theoraplay.cpp ...

View File

@@ -0,0 +1,87 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
#include "ivorbiscodec.h"
#ifndef _INCL_THEORAPLAY_H_
#define _INCL_THEORAPLAY_H_
#ifdef __cplusplus
extern "C" {
#endif
/* Abstract I/O interface so the decoder can pull Ogg data from any source. */
typedef struct THEORAPLAY_Io THEORAPLAY_Io;
struct THEORAPLAY_Io
{
    long (*read)(THEORAPLAY_Io *io, void *buf, long buflen); /* returns bytes read, or -1 on EOF/error. */
    void (*close)(THEORAPLAY_Io *io); /* close the stream; may free `io` itself (the file-backed one does). */
    void *userdata; /* opaque state for the callbacks' own use. */
};
typedef struct THEORAPLAY_Decoder THEORAPLAY_Decoder;
/* YV12 is YCrCb, not YCbCr; that's what SDL uses for YV12 overlays. */
/* Pixel formats the decoder can emit; chosen once at startDecode() time. */
typedef enum THEORAPLAY_VideoFormat
{
    THEORAPLAY_VIDFMT_YV12, /* NTSC colorspace, planar YCrCb 4:2:0 */
    THEORAPLAY_VIDFMT_IYUV, /* NTSC colorspace, planar YCbCr 4:2:0 */
    THEORAPLAY_VIDFMT_RGB,  /* 24 bits packed pixel RGB */
    THEORAPLAY_VIDFMT_RGBA  /* 32 bits packed pixel RGBA (full alpha). */
} THEORAPLAY_VideoFormat;
/* One decoded video frame; also a node in the decoder's internal queue. */
typedef struct THEORAPLAY_VideoFrame
{
    unsigned int playms;            /* intended presentation time, in milliseconds. */
    double fps;                     /* frame rate of the video stream. */
    unsigned int width;             /* frame width in pixels. */
    unsigned int height;            /* frame height in pixels. */
    THEORAPLAY_VideoFormat format;  /* layout of the pixels buffer. */
    unsigned char *pixels;          /* pixel data; released by THEORAPLAY_freeVideo(). */
    struct THEORAPLAY_VideoFrame *next;  /* next queued frame (internal use; NULL once delivered). */
} THEORAPLAY_VideoFrame;
/* A chunk of decoded audio; also a node in the decoder's internal queue. */
typedef struct THEORAPLAY_AudioPacket
{
    unsigned int playms; /* playback start time in milliseconds. */
    int channels;        /* channel count (samples are interleaved). */
    int freq;            /* sample rate in Hz. */
    int frames;          /* number of sample frames in this packet. */
    ogg_int32_t *samples; /* frames * channels ogg_int32_t samples (Tremor integer build, NOT float32). */
    struct THEORAPLAY_AudioPacket *next; /* next queued packet (internal use; NULL once delivered). */
} THEORAPLAY_AudioPacket;
/* Start decoding the named file on disk. Returns NULL on failure. */
THEORAPLAY_Decoder *THEORAPLAY_startDecodeFile(const char *fname,
                                               const unsigned int maxframes,
                                               THEORAPLAY_VideoFormat vidfmt);

/* Start decoding from a caller-supplied I/O source. The decoder takes
   ownership of `io` (it is closed on failure and at shutdown).
   `maxframes` caps how many decoded video frames may be queued at once. */
THEORAPLAY_Decoder *THEORAPLAY_startDecode(THEORAPLAY_Io *io,
                                           const unsigned int maxframes,
                                           THEORAPLAY_VideoFormat vidfmt);

/* Halt decoding, join the worker thread, and free the decoder plus any
   frames/packets the app never consumed. NULL is a no-op. */
void THEORAPLAY_stopDecode(THEORAPLAY_Decoder *decoder);

/* Non-zero while data is still queued or the worker thread is running. */
int THEORAPLAY_isDecoding(THEORAPLAY_Decoder *decoder);

/* Non-zero if the worker stopped because of a decoding/i-o error. */
int THEORAPLAY_decodingError(THEORAPLAY_Decoder *decoder);

/* Non-zero once the decoder has been prepared (headers parsed). */
int THEORAPLAY_isInitialized(THEORAPLAY_Decoder *decoder);

/* Stream-presence queries. */
int THEORAPLAY_hasVideoStream(THEORAPLAY_Decoder *decoder);
int THEORAPLAY_hasAudioStream(THEORAPLAY_Decoder *decoder);

/* Queued-but-unclaimed data: video in frames, audio in MILLISECONDS. */
unsigned int THEORAPLAY_availableVideo(THEORAPLAY_Decoder *decoder);
unsigned int THEORAPLAY_availableAudio(THEORAPLAY_Decoder *decoder);

/* Pop the next queued audio packet / video frame (NULL if none queued).
   The caller owns the result and must release it with the matching
   free function. */
const THEORAPLAY_AudioPacket *THEORAPLAY_getAudio(THEORAPLAY_Decoder *decoder);
void THEORAPLAY_freeAudio(const THEORAPLAY_AudioPacket *item);
const THEORAPLAY_VideoFrame *THEORAPLAY_getVideo(THEORAPLAY_Decoder *decoder);
void THEORAPLAY_freeVideo(const THEORAPLAY_VideoFrame *item);
#ifdef __cplusplus
}
#endif
#endif /* include-once blocker. */
/* end of theoraplay.h ... */

View File

@@ -0,0 +1,74 @@
/**
* TheoraPlay; multithreaded Ogg Theora/Ogg Vorbis decoding.
*
* Please see the file LICENSE.txt in the source's root directory.
*
* This file written by Ryan C. Gordon.
*/
#if !THEORAPLAY_INTERNAL
#error Do not include this in your app. It is used internally by TheoraPlay.
#endif
// Expanded (via #include) once per output format: converts one decoded
// Theora frame from planar 4:2:0 Y'CbCr to packed RGB, or RGBA when
// THEORAPLAY_CVT_RGB_ALPHA is defined. THEORAPLAY_CVT_FNNAME_420 supplies
// the function's name. Returns a malloc()'d pixel buffer the caller must
// free(), or NULL on allocation failure.
static unsigned char *THEORAPLAY_CVT_FNNAME_420(const th_info *tinfo,
                                                const th_ycbcr_buffer ycbcr)
{
    const int w = tinfo->pic_width;
    const int h = tinfo->pic_height;
    // NOTE(review): always allocates 4 bytes/pixel, even for 3-byte RGB
    // output — presumably deliberate slack; confirm callers don't assume
    // a tight w*h*3 buffer.
    unsigned char *pixels = (unsigned char *) malloc(w * h * 4);

    if (pixels)
    {
        unsigned char *dst = pixels;
        const int ystride = ycbcr[0].stride;
        const int cbstride = ycbcr[1].stride;
        const int crstride = ycbcr[2].stride;
        // Offset past the padding to the visible picture region; luma uses
        // even-aligned pic_x/pic_y, chroma is subsampled by 2 in each axis.
        const int yoff = (tinfo->pic_x & ~1) + ystride * (tinfo->pic_y & ~1);
        const int cboff = (tinfo->pic_x / 2) + (cbstride) * (tinfo->pic_y / 2);
        const unsigned char *py = ycbcr[0].data + yoff;
        const unsigned char *pcb = ycbcr[1].data + cboff;
        // NOTE(review): pcr reuses cboff (computed with cbstride); this
        // assumes crstride == cbstride, which holds for 4:2:0 — confirm.
        const unsigned char *pcr = ycbcr[2].data + cboff;
        int posx, posy;

        for (posy = 0; posy < h; posy++)
        {
            for (posx = 0; posx < w; posx++)
            {
                // http://www.theora.org/doc/Theora.pdf, 1.1 spec,
                //  chapter 4.2 (Y'CbCr -> Y'PbPr -> R'G'B')
                // These constants apparently work for NTSC _and_ PAL/SECAM.
                const float yoffset = 16.0f;
                const float yexcursion = 219.0f;
                const float cboffset = 128.0f;
                const float cbexcursion = 224.0f;
                const float croffset = 128.0f;
                const float crexcursion = 224.0f;
                const float kr = 0.299f;
                const float kb = 0.114f;

                // Normalize, then apply the inverse BT.601-style transform;
                // chroma is sampled at half horizontal resolution (posx / 2).
                const float y = (((float) py[posx]) - yoffset) / yexcursion;
                const float pb = (((float) pcb[posx / 2]) - cboffset) / cbexcursion;
                const float pr = (((float) pcr[posx / 2]) - croffset) / crexcursion;
                const float r = (y + (2.0f * (1.0f - kr) * pr)) * 255.0f;
                const float g = (y - ((2.0f * (((1.0f - kb) * kb) / ((1.0f - kb) - kr))) * pb) - ((2.0f * (((1.0f - kr) * kr) / ((1.0f - kb) - kr))) * pr)) * 255.0f;
                const float b = (y + (2.0f * (1.0f - kb) * pb)) * 255.0f;

                // Clamp each channel to [0,255] and emit one packed pixel.
                *(dst++) = (unsigned char) ((r < 0.0f) ? 0.0f : (r > 255.0f) ? 255.0f : r);
                *(dst++) = (unsigned char) ((g < 0.0f) ? 0.0f : (g > 255.0f) ? 255.0f : g);
                *(dst++) = (unsigned char) ((b < 0.0f) ? 0.0f : (b > 255.0f) ? 255.0f : b);
                #if THEORAPLAY_CVT_RGB_ALPHA
                *(dst++) = 0xFF;
                #endif
            } // for

            // Advance one luma row per iteration, but chroma rows only on
            // odd iterations (4:2:0 vertical subsampling).
            py += ystride;
            pcb += cbstride * (posy % 2);
            pcr += crstride * (posy % 2);
        } // for
    } // if

    return pixels;
} // THEORAPLAY_CVT_FNNAME_420
// end of theoraplay_cvtrgb.h ...