
Commit

1. Removed dead code (activeCount).
2. Fixed a substantial bug in the threading logic for camera rendering. The camera is now rendered on demand, using only the latest states (see the sketch after this list).
3. Moved camera rendering to mdlOutputs, so the updated camera data is available in the current time step instead of the next.
4. Minor refactoring.
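
For context, the on-demand handshake introduced here works roughly as follows. This is a minimal sketch, not the project's code: it uses C++20's std::binary_semaphore as a stand-in for binarySemp, plain functions in place of the S-function callbacks, and placeholder names (renderOffscreenCameras, copyCameraToBlockOutput) for the real rendering and output-copy calls.

#include <atomic>
#include <semaphore>

std::atomic<bool> shouldCameraRenderNow{false};
std::atomic<bool> isCameraDataNew{false};
std::atomic<bool> exitRequested{false};
std::binary_semaphore cameraSync{0};            // stand-in for binarySemp, initially unavailable

// Render-thread side: wait for a request, render, then signal completion.
void renderingThreadLoop()
{
    while (!exitRequested)
    {
        if (shouldCameraRenderNow)
        {
            // renderOffscreenCameras();        // placeholder for offscreenCam[i]->loopInThread()
            isCameraDataNew = true;             // a new frame is ready for the block output
            shouldCameraRenderNow = false;
            cameraSync.release();               // unblock the simulation thread
        }
    }
}

// Simulation-thread side (mdlOutputs-like): request a render and block until it is done,
// so the camera output corresponds to the current states rather than stale ones.
void outputStep()
{
    shouldCameraRenderNow = true;
    cameraSync.acquire();                       // blocks until the render thread releases
    if (isCameraDataNew)
    {
        // copyCameraToBlockOutput();           // placeholder
        isCameraDataNew = false;
    }
}
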
vmanoj1996 committed Jun 17, 2023
1 parent 9b7c6e0 commit b52191d
Showing 2 changed files with 32 additions and 26 deletions.
5 changes: 3 additions & 2 deletions src/mj.hpp
@@ -78,8 +78,7 @@ class MujocoModelInstance
// disable copy constructor
MujocoModelInstance(const MujocoModelInstance &mi);
public:
std::mutex dMutex; // mutex for model data access
binarySemp cameraSync; // semaphore for syncing the main thread and the camera render thread
std::mutex dMutex; // mutex for model data access

// cameras in the model instance
std::vector<std::shared_ptr<MujocoGUI>> offscreenCam;
@@ -108,6 +107,8 @@ class MujocoModelInstance
double lastRenderTime = 0;
double cameraRenderInterval = 0.020;
std::atomic<bool> isCameraDataNew = false;
binarySemp cameraSync; // semaphore for syncing the main thread and the camera render thread
std::atomic<bool> shouldCameraRenderNow = false;

void step(std::vector<double> u);
std::vector<double> getSensor(unsigned index);
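binarySemp itself is not part of this diff. Purely as an illustration of the interface the header above relies on (acquire, release, and the check_availability call used by the old mj_sfun.cpp code below), a binary semaphore of this kind could be built from a mutex and a condition variable; this is an assumed sketch, not the project's actual class:

#include <condition_variable>
#include <mutex>

// Hypothetical binary semaphore exposing acquire/release/check_availability.
class binarySemp
{
    std::mutex m;
    std::condition_variable cv;
    bool available = false;
public:
    // Make the token available and wake a waiting thread (idempotent for a binary semaphore).
    void release()
    {
        { std::lock_guard<std::mutex> lk(m); available = true; }
        cv.notify_one();
    }
    // Block until the token is available, then consume it.
    void acquire()
    {
        std::unique_lock<std::mutex> lk(m);
        cv.wait(lk, [this] { return available; });
        available = false;
    }
    // Non-blocking peek at the token state.
    bool check_availability()
    {
        std::lock_guard<std::mutex> lk(m);
        return available;
    }
};
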
53 changes: 29 additions & 24 deletions src/mj_sfun.cpp
@@ -442,17 +442,7 @@ static void mdlUpdate(SimStruct *S, int_T tid)

auto &miTemp = sd.mi[miIndex];

if(miTemp->offscreenCam.size() != 0)
{
if( (miTemp->get_d()->time - miTemp->lastRenderTime) > miTemp->cameraRenderInterval)
{
// maintain camera and physics in sync at required camera sample time
miTemp->cameraSync.acquire(); // blocking till offscreen buffer is rendered
miTemp->lastRenderTime = miTemp->get_d()->time;
}
}

// set control inputs
// Step the simulation by one discrete time step. Outputs (sensors and camera) get reflected in the next step
miTemp->step(uVec);
}

@@ -525,15 +515,13 @@ void renderingThreadFcn()
while(1)
{
// Visualization window(s)
int activeCount = 0;
for(int index=0; index<sd.mg.size(); index++)
{
auto duration = std::chrono::steady_clock::now() - sd.mg[index]->lastRenderClockTime;
if (duration>sd.mg[index]->renderInterval)
{
if(sd.mg[index]->loopInThread() == 0)
{
activeCount++;
sd.mg[index]->lastRenderClockTime = std::chrono::steady_clock::now();
}
}
@@ -544,23 +532,25 @@
{
auto &miTemp = sd.mi[miIndex];

if(miTemp->cameraSync.check_availability() == false)
if(miTemp->shouldCameraRenderNow == true)
{
// if rendering is already done and not yet consumed, don't render again
for(int camIndex = 0; camIndex<miTemp->offscreenCam.size(); camIndex++)
{
if(miTemp->offscreenCam[camIndex]->loopInThread() == 0)
auto status = miTemp->offscreenCam[camIndex]->loopInThread();
if(status == 0)
{
activeCount++;
miTemp->isCameraDataNew = true;
miTemp->lastRenderTime = miTemp->get_d()->time;
miTemp->isCameraDataNew = true; // indicates that new data is available for copying into the block output
}
}
miTemp->shouldCameraRenderNow = false;
miTemp->cameraSync.release();

}
}
// If there is nothing left to render, do not keep spinning in the while loop
if(sd.signalThreadExit == true) break;
// if(activeCount == 0) break;
}

// Release visualization resources
@@ -584,16 +574,17 @@
static void mdlOutputs(SimStruct *S, int_T tid)
{
int miIndex = ssGetIWorkValue(S, MI_IW_IDX);
auto &miTemp = sd.mi[miIndex];

// Copy sensors to output
real_T *y = ssGetOutputPortRealSignal(S, SENSOR_PORT_INDEX);
int_T ny = ssGetOutputPortWidth(S, SENSOR_PORT_INDEX);
int_T index = 0;

auto nSensors = sd.mi[miIndex]->si.count;
auto nSensors = miTemp->si.count;
for(int_T i=0; i<nSensors; i++)
{
vector<double> yVec = sd.mi[miIndex]->getSensor(i);
vector<double> yVec = miTemp->getSensor(i);
for(auto elem: yVec)
{
y[index] = elem;
@@ -603,15 +594,29 @@ static void mdlOutputs(SimStruct *S, int_T tid)
}
y[index] = static_cast<double>(nSensors); // last element is a dummy to handle empty sensor case

// Render the camera based on the current states. mdlUpdate is called after mdlOutputs and advances the time from tk to tk+1
if(miTemp->offscreenCam.size() != 0)
{
double elapsedTimeSinceRender = miTemp->get_d()->time - miTemp->lastRenderTime;
if( elapsedTimeSinceRender > (miTemp->cameraRenderInterval-0.00001) )
{
// maintain camera and physics in sync at required camera sample time
miTemp->shouldCameraRenderNow = true;
miTemp->cameraSync.acquire(); // blocking till offscreen buffer is rendered

// ssPrintf("sim time=%lf & render time=%lf\n", miTemp->get_d()->time, miTemp->lastRenderTime);
}
}

// Copy camera to output
uint8_T *rgbOut = (uint8_T *) ssGetOutputPortSignal(S, RGB_PORT_INDEX);
real32_T *depthOut = (real32_T *) ssGetOutputPortSignal(S, DEPTH_PORT_INDEX);
if(sd.mi[miIndex]->isCameraDataNew)
if(miTemp->isCameraDataNew)
{
// avoid unnecessary memcpy: copy only when there is new data; on the remaining time steps the old data is held at the output
sd.mi[miIndex]->getCameraRGB((uint8_t *) rgbOut);
sd.mi[miIndex]->getCameraDepth((float *) depthOut);
sd.mi[miIndex]->isCameraDataNew = false;
miTemp->getCameraRGB((uint8_t *) rgbOut);
miTemp->getCameraDepth((float *) depthOut);
miTemp->isCameraDataNew = false;
}
}

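A note on the render-trigger condition added to mdlOutputs above: the comparison uses cameraRenderInterval - 0.00001 rather than cameraRenderInterval, which tolerates floating-point drift in the accumulated simulation time. A standalone illustration (the 0.002 s physics step is a made-up value, not taken from the model):

#include <cstdio>

int main()
{
    const double dt = 0.002;                    // hypothetical physics step size
    const double cameraRenderInterval = 0.020;  // same value as in mj.hpp
    double time = 0.0, lastRenderTime = 0.0;

    for (int k = 1; k <= 20; ++k)
    {
        time += dt;                             // accumulated time can drift slightly from k*dt
        const double elapsed = time - lastRenderTime;
        // With a strict 'elapsed > cameraRenderInterval' check, an elapsed value that lands
        // just below 0.020 due to rounding would postpone the render by one step; the small
        // tolerance keeps the camera on its nominal 20 ms schedule.
        if (elapsed > (cameraRenderInterval - 0.00001))
        {
            std::printf("render at step %d, elapsed = %.12f s\n", k, elapsed);
            lastRenderTime = time;
        }
    }
    return 0;
}
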
