/*	wakam.cpp

	Run this by something like ./wakam

	It may need to run as root because the /etc/udev/rules.d entry
	doesn't seem to work for this....

	January 2020, by H. Dietz
*/


//Uncomment the following line if you are compiling this code in Visual Studio
//#include "stdafx.h"

#include <opencv2/opencv.hpp>
#include <iostream>
#include <math.h>
#include <cstdio>

#include "WalabotAPI.h"
#include <stdio.h>
#include <string>

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ipc.h>
#include <sys/shm.h>


#ifdef __LINUX__
 #define CONFIG_FILE_PATH "/etc/walabotsdk.conf"
#else
 #define CONFIG_FILE_PATH "C:\\Program Files\\Walabot\\WalabotSDK\\bin\\.config"
#endif

using namespace cv;
using namespace std;

// Titles for the two OpenCV display windows
const	char	*name360 = "Insta360 + Walabot Creator";
const	char	*nameraw = "Insta360 raw video";

#define	MINCONF	(1.0/255)	// minimum non-0 confidence
#define PI	3.1415926536
#define	VIEWANG	(180)		// total view angle handled, degrees
#define	HALFANG	(VIEWANG/2)
#define	TEMPANG	(8 * 7.5)	// approx view of thermal imager
#define	WALK	(10.0)
#define	WALKANG	(WALK * 7.5)	// walk covering view of thermal imager
#define	LOOKRAD	(15)		// look radius (square, actually) for alignment
#define	SHIFTPOINTS	1024	// number of points to examine to determine motion
#define	BADSHIFTQ	(SHIFTPOINTS * 3 * 256)	// bad (high) value for shiftq
#define	TICKAGE		(254.0 / 255.0)		// confidence drop per update
#define	CIRCDIA	(200)		// maximum circle diameter
#define	NEWCONF	(1.0)		// confidence of new data

// Run-time options, settable from the command line (see main)
int	cameraNum = 1;		// which /dev/video device?
int	noiseFilter = 0;	// filter-out painted values if there's noise
int	rawvout = 0;		// raw video out?
int	stitchCert = 0;		// how certain is stitch alignment?
int	verbose = 0;		// enable verbose output

// Abort with a diagnostic when a Walabot API call fails: print the
// SDK's error string, wait for enter (so the message can be read
// before a terminal window closes), then exit(1).
// Used after every Walabot_* call below.
#define CHECK_WALABOT_RESULT(result, func_name) \
{ \
 if (result != WALABOT_SUCCESS) \
 { \
 const char* errorStr = Walabot_GetErrorString(); \
 std::cout << std::endl << func_name << " error: " \
 << errorStr << std::endl; \
 std::cout << "Press enter to continue ..."; \
 std::string dummy; \
 std::getline(std::cin, dummy); \
 exit(1); \
 } \
}


#define	SHMRECS	1024	// number of records in the circular buffer

// nodes of shared memory circular buffer
// sending data from Walabot (forked child) to the 360 view (parent)
typedef	struct {
	int64_t		serial;	// write sequence number; -1 means never written
	SensorTarget	target;	// one radar target sample
} shmrec_t;

// Each process keeps its own copy of shmserial after fork(): for the
// writer it is the next serial to publish, for the reader the next
// serial it expects to consume.
volatile shmrec_t	*shm;
int64_t			shmserial = 0;

void
shmstart(void)
{
	// Allocate System V shared memory circular buffer
	int shmid = shmget(IPC_PRIVATE,
			   (SHMRECS * sizeof(shmrec_t)),
			   (IPC_CREAT | 0600));

	// Attach to it
	shm = ((volatile shmrec_t *) shmat(shmid, 0, 0));

	// Set to self-destruct when all done
	shmctl(shmid, IPC_RMID, 0);

	// Clear all the serials
	for (int i=0; i<SHMRECS; ++i) {
		shm[i].serial = -1;
	}
}

void
sendTargets(SensorTarget *targets, int numTargets)
{
	// Publish each detected target into the shared-memory ring.
	// Payload fields are stored before the serial, so a reader that
	// observes the serial also observes a complete record.
	for (int n = 0; n < numTargets; ++n) {
		volatile shmrec_t *slot = &shm[shmserial % SHMRECS];
		slot->target.xPosCm = targets[n].xPosCm;
		slot->target.yPosCm = targets[n].yPosCm;
		slot->target.zPosCm = targets[n].zPosCm;
		slot->target.amplitude = targets[n].amplitude;
		slot->serial = shmserial;
		++shmserial;
	}
}

volatile SensorTarget *
recvTarget(void)
{
	// Poll the ring slot where the next record is expected.  When the
	// writer has placed a record with serial >= our cursor there,
	// advance the cursor past it and return the record; otherwise
	// return NULL (nothing new yet).
	int slot = (int) (shmserial % SHMRECS);
	if (shm[slot].serial < shmserial) {
		return((SensorTarget *) 0);
	}
	shmserial = shm[slot].serial + 1;
	return(&(shm[slot].target));
}

void PrintSensorTargets(SensorTarget* targets, int numTargets)
{
	// Always forward the targets to the 360-view process first;
	// console output is only produced in verbose mode.
	sendTargets(targets, numTargets);

	if (!verbose) {
		return;
	}

	if (numTargets <= 0) {
		printf("No target detected\n");
		return;
	}

	for (int k = 0; k < numTargets; ++k) {
		printf("Target #%d: X=%lf Y=%lf Z=%lf amplitude=%lf\n",
		       k,
		       targets[k].xPosCm,
		       targets[k].yPosCm,
		       targets[k].zPosCm,
		       targets[k].amplitude);
	}
}


void SensorCode_SampleCode(void)
{
	// Body of the forked child process: configure the Walabot sensor,
	// then loop forever triggering scans and publishing detected
	// targets to shared memory (PrintSensorTargets -> sendTargets).
	WALABOT_RESULT res;
	// Walabot_GetSensorTargets - output parameters
	SensorTarget* targets;
	int numTargets;
	// Walabot_GetStatus - output parameters
	APP_STATUS appStatus;
	double calibrationProcess; // Percentage of calibration completed, if status is STATUS_CALIBRATING
	// Walabot_GetRawImageSlice - output parameters (only referenced by
	// the commented-out raw-slice retrieval below; kept for re-enabling)
	int *rasterImage;
	int sizeX;
	int sizeY;
	double sliceDepth;
	double power;

	// Initialize configuration
	// Walabot_SetArenaR - input parameters (radial range, cm)
	double minInCm = 30;
	double maxInCm = 200;
	double resICm = 3;
	// Walabot_SetArenaTheta - input parameters (polar angle, degrees)
	double minIndegrees = -15;
	double maxIndegrees = 15;
	double resIndegrees = 5;
	// Walabot_SetArenaPhi - input parameters (azimuth angle, degrees)
	double minPhiInDegrees = -60;
	double maxPhiInDegrees = 60;
	double resPhiInDegrees = 5;

	bool mtiMode = true;
	res = Walabot_Initialize(CONFIG_FILE_PATH);
	CHECK_WALABOT_RESULT(res, "Walabot_Initialize");

	// 1) Connect : Establish communication with Walabot.
	res = Walabot_ConnectAny();
	CHECK_WALABOT_RESULT(res, "Walabot_ConnectAny");

	// 2) Configure : Set scan profile and arena
	// -> Distance scanning through air; high-resolution images; slower capture rate 
	res = Walabot_SetProfile(PROF_SENSOR);
	CHECK_WALABOT_RESULT(res, "Walabot_SetProfile");

	// Setup arena - specify it by Cartesian coordinates(ranges and resolution on the x, y, z axes); 
	// In Sensor mode there is need to specify Spherical coordinates(ranges and resolution along radial distance and Theta and Phi angles).
	res = Walabot_SetArenaR(minInCm, maxInCm, resICm);
	CHECK_WALABOT_RESULT(res, "Walabot_SetArenaR");

	// Sets polar range and resolution of arena (parameters in degrees).
	res = Walabot_SetArenaTheta(minIndegrees, maxIndegrees, resIndegrees);
	CHECK_WALABOT_RESULT(res, "Walabot_SetArenaTheta");

	// Sets azimuth range and resolution of arena.(parameters in degrees).
	res = Walabot_SetArenaPhi(minPhiInDegrees, maxPhiInDegrees, resPhiInDegrees);
	CHECK_WALABOT_RESULT(res, "Walabot_SetArenaPhi");

	FILTER_TYPE filterType = mtiMode ?
		FILTER_TYPE_MTI : //Moving Target Identification: standard dynamic-imaging filter
		FILTER_TYPE_NONE;
	res = Walabot_SetDynamicImageFilter(filterType);
	CHECK_WALABOT_RESULT(res, "Walabot_SetDynamicImageFilter");

	// 3) Start: Start the system in preparation for scanning.
	res = Walabot_Start();
	CHECK_WALABOT_RESULT(res, "Walabot_Start");

	// 4) Start Calibration - only if MTI mode is not set - (there is no sense 
	// executing calibration when MTI is active)
	if (!mtiMode) {
		res = Walabot_StartCalibration();
		CHECK_WALABOT_RESULT(res, "Walabot_StartCalibration");
	}

	bool recording = true;
	while (recording) {
		// calibrates scanning to ignore or reduce the signals
		res = Walabot_GetStatus(&appStatus, &calibrationProcess);
		CHECK_WALABOT_RESULT(res, "Walabot_GetStatus");

		// 5) Trigger: Scan(sense) according to profile and record signals to be 
		// available for processing and retrieval.
		res = Walabot_Trigger();
		CHECK_WALABOT_RESULT(res, "Walabot_Trigger");

		// 6) Get action : retrieve the last completed triggered recording 
		res = Walabot_GetSensorTargets(&targets, &numTargets);
		CHECK_WALABOT_RESULT(res, "Walabot_GetSensorTargets");
//		res = Walabot_GetRawImageSlice(&rasterImage, &sizeX, &sizeY, &sliceDepth, &power);
//		CHECK_WALABOT_RESULT(res, "Walabot_GetRawImageSlice");

		// Publish the targets to shared memory (and log when verbose)
		PrintSensorTargets(targets, numTargets);
	}

	// 7) Stop and Disconnect.
	res = Walabot_Stop();
	CHECK_WALABOT_RESULT(res, "Walabot_Stop");
	res = Walabot_Disconnect();
	CHECK_WALABOT_RESULT(res, "Walabot_Disconnect");
	// BUG FIX: Walabot_Clean's result was previously discarded and the
	// stale result from Walabot_Disconnect was re-checked instead
	res = Walabot_Clean();
	CHECK_WALABOT_RESULT(res, "Walabot_Clean");
}



Vec3b
frac2bgr(float f)
{
	// Computes BGR from a fraction 0..1
	// 0 becomes blue (cold), 1 is red (hot)
	// piecewise-linear color ramp, formula from NodeScape
	// (dropped the "register" qualifier: removed in C++17)
	Vec3b c;
	uchar r, g, b;

	/* Only use colors to blue, red is hot */
	// maps input 0..1 onto f in [0, 0.8], inverted so red is hot
	f = 0.8 - (f * 0.8);

	// pick one of five linear ramp segments of width 0.2
	switch ((int) (f * 5)) {
	case 0:			// red toward yellow
		r = 255;
		g = 5 * 255 * f;
		b = 0;
		break;
	case 1:			// yellow toward green
		r = 5 * 255 * (0.4 - f);
		g = 255;
		b = 0;
		break;
	case 2:			// green toward cyan
		r = 0;
		g = 255;
		b = 5 * 255 * (f - 0.4);
		break;
	case 3:			// cyan toward blue
		r = 0;
		g = 5 * 255 * (0.8 - f);
		b = 255;
		break;
	default:		// f == 0.8 exactly: pure blue
		r = 5 * 255 * (f - 0.8);
		g = 0;
		b = 255;
	}

	// OpenCV channel order is B, G, R
	c.val[0] = b;
	c.val[1] = g;
	c.val[2] = r;
	return(c);
}


Point2f
tofishdex(int x, int y, int tall)
{
	// Map a pixel (x, y) of the equirectangular output (height "tall")
	// back to its source coordinate in one fisheye lens image.
	const float fov = (float) PI / 180 * 180;	// horizontal FOV in radians
	const float fov2 = (float) PI / 180 * 180;	// vertical FOV in radians
	const float ftall = tall;
	// Active tall crop to get 180 degrees
	const float atall = ftall * 0.92;

	// Longitude / latitude of the output pixel, each -pi/2 .. pi/2
	float lon = PI * (x / ftall - 0.5);
	float lat = PI * (y / ftall - 0.5);

	// Corresponding unit vector in 3D space
	Point3f v;
	v.x = cos(lat) * sin(lon);
	v.y = cos(lat) * cos(lon);
	v.z = sin(lat);		// cylindrical about 92%

	// Fisheye polar angle and inclination from the lens axis
	float ang = atan2(v.z, v.x);
	float inc = atan2(sqrt(v.x * v.x + v.z * v.z), v.y);

	// Radial distance along each image axis
	float rx = atall * inc / fov;
	float ry = atall * inc / fov2;

	// Pixel position inside the fisheye circle, centered in the frame
	Point2f fish;
	fish.x = 0.5 * atall + rx * cos(ang) + ((ftall - atall) / 2);
	fish.y = 0.5 * atall + ry * sin(ang) + ((ftall - atall) / 2);
	return(fish);
}


// Remap table cached by fastmap(): for each output pixel, the source
// pixel (x, y) in the raw frame plus a blend fraction used to fade
// toward gray near the fisheye edges.  Statically sized for frames up
// to 1504 rows tall (output is 2*rows = 3008 columns wide).
static struct {
	int x, y;
	float frac;
} mapcache[1504][3008];
static int hascache = 0;	// nonzero once mapcache has been filled

Mat
fastmap(Mat frame)
{
	// Cache-based fisheye remapping: converts the side-by-side dual
	// fisheye frame into the displayed projection.  The mapping depends
	// only on the frame geometry and stitchCert, so it is computed on
	// the first call and replayed from mapcache[][] afterwards.
	Mat map(frame.rows, frame.cols, CV_8UC3, Scalar(0));
	int off = (frame.rows / 2);
	int off2 = frame.rows + off;
	int mod = frame.rows * 2;

	// Guard the statically-sized cache: rows are indexed 0..rows-1 and
	// columns 0..(2*rows)-1, which must fit mapcache[1504][3008].
	// Previously an oversized frame silently overran the array (UB).
	CV_Assert(frame.rows <= 1504 && mod <= 3008);

	if (!hascache) {
		for (int j=0; j<frame.rows; ++j) {
			for (int i=0; i<frame.rows; ++i) {
				Point2f fpos = tofishdex(i, j, frame.rows);
				// normalized distance from the lens center, 0..~1
				float fraci = (fpos.x - frame.rows/2.0) / (frame.rows/2.0) / 0.92;
				float fracj = (fpos.y - frame.rows/2.0) / (frame.rows/2.0) / 0.92;
				fraci *= fraci;
				fracj *= fracj;
				float frac = sqrt(fraci + fracj);
				// steep falloff (4th power) so only the rim fades
				frac *= frac;
				frac *= frac;
				frac = 1.0 - frac;
				// stitchCert raises the floor of the blend fraction
				frac = (stitchCert / 100.0) + (frac * (1.0 - (stitchCert / 100.0)));
				frac = ((frac > 1.0) ? 1.0 : ((frac < 0) ? 0.0 : frac));

				// left lens occupies the middle of the output
				mapcache[j][i + off].x = fpos.x;
				mapcache[j][i + off].y = fpos.y;
				mapcache[j][i + off].frac = frac;

				// right lens wraps around the output edges
				mapcache[j][(i + off2) % mod].x = fpos.x + frame.rows;
				mapcache[j][(i + off2) % mod].y = fpos.y;
				mapcache[j][(i + off2) % mod].frac = frac;
			}
		}
		hascache = 1;
	}

	// Perform the actual mapping, blending toward mid-gray (127.5)
	// where frac < 1 (the uncertain rim between the two lenses)
	for (int j=0; j<frame.rows; ++j) {
		for (int i=0; i<mod; ++i) {
			Vec3b color = frame.at<Vec3b>( Point(mapcache[j][i].x, mapcache[j][i].y) );
			float frac = mapcache[j][i].frac;
			float ifrac = 127.5 * (1.0 - frac);
			color.val[0] = (color.val[0] * frac) + ifrac;
			color.val[1] = (color.val[1] * frac) + ifrac;
			color.val[2] = (color.val[2] * frac) + ifrac;
			map.at<Vec3b>(Point(i, j)) = color;
		}
	}

	return(map);
}

float
shift(int &rdx, int &rdy, Mat one, Mat two)
{
	// Determine the pixel shift between frames one and two by sampling
	// SHIFTPOINTS random positions and scoring every candidate offset
	// within +/-LOOKRAD.  The best offset comes back through rdx/rdy.
	// Returns the summed squared color difference at that offset -- the
	// quality metric, smaller is better (callers compare it against
	// BADSHIFTQ).  BUG FIX: the function previously fell off the end
	// with no return statement, which is undefined behavior; it now
	// returns the best-match score.  Unused locals cy/sub removed.
	int mody = (one.rows / 2) - (4 * LOOKRAD);
	int modx = (one.rows / 2) - (4 * LOOKRAD); // could be cols, but limit to center
	float dif[2*LOOKRAD+1][2*LOOKRAD+1];

	// one accumulator per candidate offset
	for (int dy=0; dy<((2*LOOKRAD)+1); ++dy)
	for (int dx=0; dx<((2*LOOKRAD)+1); ++dx) {
		dif[dy][dx] = 0;
	}

	for (int k=0; k<SHIFTPOINTS; ++k) {
		int j = LOOKRAD + (rand() % mody);
		int i = LOOKRAD + (rand() % modx);
		Vec3b rgb1 = one.at<Vec3b>(LOOKRAD + j, LOOKRAD + i);
		float r1 = rgb1[0]; 
		float g1 = rgb1[1]; 
		float b1 = rgb1[2]; 

		// score this sample point against every candidate offset
		for (int dy=0; dy<((2*LOOKRAD)+1); ++dy)
		for (int dx=0; dx<((2*LOOKRAD)+1); ++dx) {
			Vec3b rgb2 = two.at<Vec3b>(j + dy, i + dx);
			float r2 = r1 - rgb2[0];
			float g2 = g1 - rgb2[1];
			float b2 = b1 - rgb2[2]; 
			dif[dy][dx] += (r2 * r2) + (g2 * g2) + (b2 * b2);
		}
	}

	// find the lowest-difference offset, defaulting to no shift
	float lo = dif[LOOKRAD][LOOKRAD];
	int bestdy = LOOKRAD;
	int bestdx = LOOKRAD;
	for (int dy=0; dy<((2*LOOKRAD)+1); ++dy)
	for (int dx=0; dx<((2*LOOKRAD)+1); ++dx) {
		if (dif[dy][dx] < lo) {
			lo = dif[dy][dx];
			bestdy = dy;
			bestdx = dx;
		}
	}

	rdy = bestdy - LOOKRAD;
	rdx = bestdx - LOOKRAD;
	return(lo);
}


int
main(int argc, char** argv)
{
	Mat frame;		// raw camera frame (side-by-side dual fisheye)
	Mat map;		// current remapped 360 view
	Mat oldmap;		// previous remapped view, for motion estimation
	Mat fmap;		// remapped view with radar data painted on top
	int firsttime = 1;	// true until oldmap has been initialized
	float mintemp = 255;	// running min of painted values, for color scaling
	float maxtemp = 0;	// running max of painted values

	// command line args
	for (int i=1; i<argc; ++i) {
		char *p = argv[i];
 
		if (*p != '-') {
usage:
			fprintf(stderr,
"%s: Usage: %s -n -v\n"
"-c#\tCamera device, /dev/video# (default %d)\n"
"-n\tNoise filter image motion\n"
"-r\tEnable raw video out\n"
"-s#\tStitch certainty #%% (default %d)\n"
"-v\tVerbose output\n",
				argv[0],
				argv[0],
				cameraNum,
				stitchCert);
			exit(1);
		}

		// one argument may pack several single-letter flags
		while (*++p) {
			switch (*p) {
			case 'c':	cameraNum = atoi(p+1);
					while (p[1]) ++p;	// consume the digits
					break;
			case 'n':	noiseFilter = 1;
					break;
			case 'r':	rawvout = 1;
					break;
			case 's':	stitchCert = atoi(p+1);
					stitchCert = ((stitchCert < 1) ? 0 : ((stitchCert > 100) ? 100 : stitchCert));
					while (p[1]) ++p;	// consume the digits
					break;
			case 'v':	verbose = 1;
					break;
			default:
					goto usage;
			}
		}
	}

	// Create shared memory segment (before fork so both sides share it)
	shmstart();

	// Run Walabot in a separate process; the child loops forever, so
	// the exit(1) only runs if SensorCode_SampleCode returns on error
	if (!fork()) {
		SensorCode_SampleCode();
		exit(1);
	}

	//Open the default video camera
	VideoCapture cap(cameraNum);

	// if not success, exit program
	if (cap.isOpened() == false) {
		cout << "Cannot open the video camera" << endl;
		cin.get(); //wait for any key press
		return(-1);
	} 

	namedWindow(name360, WINDOW_NORMAL); //create a window for 360 painted view
	resizeWindow(name360, 480*2, 480);

	// read a video frame
	bool bSuccess = cap.read(frame); // read a new frame from video 

	// exit if the frame cannot be captured
	if (bSuccess == false) {
		cout << "Video camera is disconnected" << endl;
		cin.get(); //Wait for any key press
		exit(1);
	}

	if (rawvout) {
		namedWindow(nameraw, WINDOW_NORMAL); //create a window for raw view
		resizeWindow(nameraw, 480*2, 480);
		imshow(nameraw, frame);
	}

	map = fastmap(frame);
	imshow(name360, map);

	// Per-pixel overlay state; ttemp/tconf are scratch copies used
	// while shifting (initial Scalar values are overwritten before use)
	Mat conf(frame.rows, frame.cols, CV_32F, Scalar(0)); //confidence
	Mat temp(frame.rows, frame.cols, CV_32F, Scalar(1)); //temperature
	Mat ttemp(frame.rows, frame.cols, CV_32F, Scalar(2)); //temperature
	Mat tconf(frame.rows, frame.cols, CV_32F, Scalar(3)); //confidence

	while (1) {
		// read a video frame
		bool bSuccess = cap.read(frame); // read a new frame from video 

		//Breaking the while loop if the frames cannot be captured
		if (bSuccess == false) {
			cout << "Video camera is disconnected" << endl;
			cin.get(); //Wait for any key press
			break;
		}

		if (rawvout) {
			imshow(nameraw, frame);
		}

		// make color version of remapped
		map = fastmap(frame);
		if (firsttime) { map.copyTo(oldmap); firsttime=0; }

		// determine and apply shift
		// NOTE(review): shiftq should be the best-match score (smaller
		// is better, compared to BADSHIFTQ below) -- verify that
		// shift() actually returns its quality metric
		int bestdx, bestdy;
		float shiftq = shift(bestdx, bestdy, oldmap, map);
		if (verbose) cout << "shift is " << bestdx << "," << bestdy << " quality " << shiftq << endl;

		// shift the previous temp & adjust confidence
		// confidence reduces with age & low confidence shifts
		float confmul = ((shiftq > BADSHIFTQ) ? 1.0 : (shiftq / BADSHIFTQ));
		confmul *= confmul;
		confmul *= confmul;
		confmul = TICKAGE * (1.0 - confmul);
		temp.copyTo(ttemp);
		conf.copyTo(tconf);
		for (int j=0; j<temp.rows; ++j) {
			int tj = (j + bestdy + temp.rows) % temp.rows;
			for (int i=0; i<temp.cols; ++i) {
				int ti = (i + bestdx + temp.cols) % temp.cols;
				float t = ttemp.at<float>(j, i);
				float c = tconf.at<float>(j, i);

				// also age by local motion
				if (noiseFilter && (c > MINCONF)) {
					Vec3b rgb0 = oldmap.at<Vec3b>(tj, ti);
					Vec3b rgb1 = map.at<Vec3b>(tj, ti);
					float b = (rgb0.val[0] - rgb1.val[0]) / 255.0;
					float g = (rgb0.val[1] - rgb1.val[1]) / 255.0;
					float r = (rgb0.val[2] - rgb1.val[2]) / 255.0;
					float d = (b * b) + (g * g) + (r * r);
					d = 1.0 - d;
					if (d > 0) c *= d;
				}

				// drop pixels whose confidence has decayed away
				if (c > MINCONF) {
					if (t < mintemp) mintemp = t;
					if (t > maxtemp) maxtemp = t;
				} else {
					t = 0;
					c = 0;
				}
				temp.at<float>(tj, ti) = t;
				conf.at<float>(tj, ti) = confmul * c;
			}
		}

		// done using old 360 map
		map.copyTo(oldmap);

		// add-in Walabot points: drain the shared-memory ring
		// (assignment in the loop condition is intentional)
		volatile SensorTarget *targ;
		while (targ = recvTarget()) {
			// get the target info
			float cx = targ->xPosCm;
			float cy = targ->yPosCm;
			float cz = targ->zPosCm;
			float amp = targ->amplitude;

			// handle scaling (cm to overlay pixel coordinates)
			cx *= (temp.rows * 60.0) / (180.0 * 50.0);
			cy *= (temp.rows * 15.0) / (180.0 * 50.0);
			cx += temp.rows;
			cy += (temp.rows / 2.0);
			amp *= 10000.0;
			int rad = ((amp > CIRCDIA) ? CIRCDIA : amp);
			// NOTE(review): radsq uses the UNCLAMPED amplitude; when
			// amp > CIRCDIA every pixel of the bounding box passes the
			// circle test below -- confirm whether rad * rad was meant
			float radsq = amp * amp;

			// update stats
			if (cz < mintemp) mintemp = cz;
			if (cz > maxtemp) maxtemp = cz;

			// for each pixel in box around circle
			for (int j= -rad; j<=rad; ++j)
			for (int i= -rad; i<=rad; ++i) {
				// if we're in the circle
				float sq = (j * j) + (i * i);
				if (sq <= radsq) {
					// inside, compute coordinates, value
					int jpos = ((int)(cy + j + temp.rows)) % temp.rows;
					int ipos = ((int)(cx + i + temp.cols)) % temp.cols;
//					int jpos = cy + j;
//					int ipos = cx + i;
					// NOTE(review): w is computed but never used below
					// -- dead code, or a missing distance weighting?
					float w = 1.0 - (sq / radsq);
					w *= (w * w * w);

					// merge with previous values
					float ov = temp.at<float>(jpos, ipos);
					float om = conf.at<float>(jpos, ipos);
					ov = ((ov * om) + (cz * NEWCONF)) / (om + NEWCONF);
					om += NEWCONF;
					if (om > 1.0) om = 1.0;
					temp.at<float>(jpos, ipos) = ov;
					conf.at<float>(jpos, ipos) = om;
				}
			}
		}

		// now paint the temperatures
		map.copyTo(fmap);
		if (mintemp <= maxtemp) {
			float tempscale = 1.0 / (maxtemp - mintemp);
			for (int j=0; j<fmap.rows; ++j)
			for (int i=0; i<fmap.cols; ++i) {
				float c = conf.at<float>(j, i);

				// only bother painting if some confidence
				if (c > (1.0 / 255.0)) {
					float t = tempscale * (temp.at<float>(j, i) - mintemp);
					Vec3b tbgr = frac2bgr(t);
					Vec3b bgr = fmap.at<Vec3b>(j, i);
//					if (c > 1) c = 1;	// should be true already
					c = (c * c * c) / 2.0;	// cube fraction to make shading more subtle
					bgr.val[0] = (bgr.val[0] * (1.0 - c)) + (tbgr.val[0] * c);
					bgr.val[1] = (bgr.val[1] * (1.0 - c)) + (tbgr.val[1] * c);
					bgr.val[2] = (bgr.val[2] * (1.0 - c)) + (tbgr.val[2] * c);
					fmap.at<Vec3b>(j, i) = bgr;
				}
			}
		}

		imshow(name360, fmap);

		// cause display to update
		waitKey(1);
	}
}
