/* (Repository-browser residue removed: "Subversion Repositories shark /
 *  Rev / Blame | Last modification | View Log | RSS feed" — this text was
 *  not C source and prevented compilation.) */

// framegrabber stuffs

/* File name ......... : ELABOR.C
 * Project............ :
 * Object ............ :
 * Author ............ : Facchinetti Tullio
 * Language .......... : C
 * Compiler .......... : GNU C
 * Operative system .. : MS-DOS/HARTIK
 * Creation date ..... : 04/03/2000
 * Last modify ....... : 19/11/99
 */

#include "demo.h"
#include "pclab.h"

#include <kernel/func.h>
#include <modules/cabs.h>
#include <stdio.h>
#include <drivers/pxc.h>

#include "string.h"
#include "ll/i386/x-dos.h"
#include "modules/hartport.h"


#define LUNGH  35.


// extern struct Data_cab2 tmp;





static CAB frameCAB;    // CAB holding the grabbed camera frames (owned by the PXC driver)
static CAB trackingCAB; // CAB holding the tracking results (TTracking records)


int img_border    =  10; // horizontal margin (pixels) excluded from every scan
int img_border_y  =  130; // vertical margin (pixels) excluded from every scan
int window_width  =  40; // width of the local search window around the last centroid
int window_height =  40; // height of the local search window
TPixel pix_threshold = 128; // grey level separating object pixels from background

// a 256 GRAYscale palette (filled by init_framegrabber)
WORD GRAY_palette[256];

// the image to be put on the screen (scaled copy of the grabbed frame)
WORD converted_image[IMG_COL * IMG_ROW];

// buffer of the most recent object samples, reused circularly by tracking_task
TDataObj sequence[N_FRAMES];


/* Scan a rectangular search window centred on (xc, yc) for object pixels.
 *
 * The window is window_width x window_height pixels, clipped against the
 * image margins (border on x, border_y on y).  Every pixel beyond
 * pix_threshold (below it with __BLACK_ON_WHITE, above it otherwise) is
 * accumulated into the bounding box (x1,y1)-(x2,y2) and into the centroid
 * (xb,yb) of `data`; time_stamp records when the first pixel was seen.
 *
 * data     : output sample (bounding box, centroid, time stamp)
 * in_frame : grey-level frame, N_COL x N_ROW pixels, row major
 * xc, yc   : centre of the search window (normally the previous centroid)
 * border,
 * border_y : margins in pixels excluded from the scan
 *
 * Returns 1 if at least one object pixel was found, 0 otherwise; when
 * nothing is found xb/yb keep the sentinel value -1.
 */
char scan_window_frame(TDataObj *data, TPixel *in_frame, \
                       unsigned int xc, unsigned int yc, int border, int border_y)
{
  unsigned long int offset;
  unsigned int i, j;
  TPixel pix;
  double sum_x = 0.0, sum_y = 0.0;
  unsigned int n_pix = 0;
  int x1, y1, x2, y2;  // Must be int: the clipping math can go negative
  char found;

  /* Sentinel values: empty bounding box, invalid centroid and time stamp */
  data->x1 = N_COL;
  data->y1 = N_ROW;
  data->x2 = data->y2 = 0;
  data->xb = data->yb = -1;
  data->time_stamp = -1;

  found = 0;

  /* Clip the window against the margins.  BUGFIX: cast xc/yc to int first —
   * they are unsigned, so (xc - window_width / 2) wrapped to a huge value
   * for xc < window_width / 2 and defeated the MAX_NUM clipping. */
  x1 = MAX_NUM(((int)xc - window_width / 2), (border));
  y1 = MAX_NUM(((int)yc - window_height / 2), (border_y));
  x2 = MIN_NUM(((int)xc + window_width / 2), (N_COL - border));
  y2 = MIN_NUM(((int)yc + window_height / 2), (N_ROW - border_y));

  for (i = y1; i < y2; i++) {
    for (j = x1; j < x2; j++) {
      offset = i * N_COL + j;
      pix = *(in_frame + offset);

#ifdef __BLACK_ON_WHITE
      // Pixel found (object is BLACK, background is WHITE)
      if (pix < pix_threshold) {
#else
      // Pixel found (object is WHITE, background is BLACK)
      if (pix > pix_threshold) {
#endif
        data->time_stamp = sys_gettime(NULL);
        found = 1;
        n_pix++;
        sum_x += j;
        sum_y += i;
        // Grow the bounding box to include this pixel
        if (i < data->y1)
          data->y1 = i;
        if (i > data->y2)
          data->y2 = i;
        if (j < data->x1)
          data->x1 = j;
        if (j > data->x2)
          data->x2 = j;
      }
    }
  }

  /* BUGFIX: only compute the centroid when pixels were found; the old code
   * divided by n_pix == 0 on an empty window, storing NaN into xb/yb. */
  if (n_pix > 0) {
    data->xb = sum_x / n_pix;
    data->yb = sum_y / n_pix;
  }
  return(found);
}

/* Scan the whole frame (minus the img_border / img_border_y margins) for
 * object pixels; thresholding and statistics are done in a single pass.
 *
 * data     : output sample (bounding box, centroid, time stamp)
 * in_frame : grey-level frame, N_COL x N_ROW pixels, row major
 *
 * Returns 1 if at least one object pixel was found, 0 otherwise; when
 * nothing is found xb/yb keep the sentinel value -1.
 */
char scan_all_frame(TDataObj *data, TPixel *in_frame)
{
  unsigned long int offset;
  unsigned int i, j;
  TPixel pix;
  double sum_x = 0.0, sum_y = 0.0;
  unsigned int n_pix = 0;
  char found;

  /* Sentinel values: empty bounding box, invalid centroid and time stamp */
  data->x1 = N_COL;
  data->y1 = N_ROW;
  data->x2 = data->y2 = 0;
  data->xb = data->yb = -1;
  data->time_stamp = -1;

  found = 0;

  // In a single image scanning it performs thresholding and computation
  for (i = img_border_y; i < N_ROW - img_border_y; i++) {
    for (j = img_border; j < N_COL - img_border; j++) {
      offset = i * N_COL + j;
      pix = *(in_frame + offset);

#ifdef __BLACK_ON_WHITE
      // Pixel found (object is BLACK, background is WHITE)
      if (pix < pix_threshold) {
#else
      // Pixel found (object is WHITE, background is BLACK)
      if (pix > pix_threshold) {
#endif
        data->time_stamp = sys_gettime(NULL);
        found = 1;
        n_pix++;
        sum_x += j;
        sum_y += i;
        // Grow the bounding box to include this pixel
        if (i < data->y1)
          data->y1 = i;
        if (i > data->y2)
          data->y2 = i;
        if (j < data->x1)
          data->x1 = j;
        if (j > data->x2)
          data->x2 = j;
      }
    }
  }

  /* BUGFIX: only compute the centroid when pixels were found; the old code
   * divided by n_pix == 0 on an empty frame, storing NaN into xb/yb. */
  if (n_pix > 0) {
    data->xb = sum_x / n_pix;
    data->yb = sum_y / n_pix;
  }
  return(found);
}

/* Predict the object position time_to milliseconds ahead from the two most
 * recent samples in the global `sequence` buffer.
 *
 * top_frame : index one past the newest valid sample (must be >= 2)
 * track_x,
 * track_y   : output, predicted centroid position
 * int_vx,
 * int_vy    : output, scaled integer velocity components
 * time_to   : prediction horizon in milliseconds
 */
void tracking(int top_frame, int *track_x, int *track_y, int *int_vx, int *int_vy, int time_to)
{
  float vx, vy;
  float dt;

  /* Time elapsed between the two newest samples */
  dt = (float)(sequence[top_frame - 1].time_stamp - sequence[top_frame - 2].time_stamp);

  /* BUGFIX: two frames can carry the same time stamp; the old code then
   * divided by zero.  Fall back to zero velocity in that case. */
  if (dt != 0.0f) {
    vx = (float)(sequence[top_frame - 1].xb - sequence[top_frame - 2].xb) / dt;
    vy = (float)(sequence[top_frame - 1].yb - sequence[top_frame - 2].yb) / dt;
  } else {
    vx = vy = 0.0f;
  }
  vx *= 1000;
  vy *= 1000;

  *track_x = sequence[top_frame - 1].xb + vx * time_to;
  *track_y = sequence[top_frame - 1].yb + vy * time_to;

  *int_vx = vx * 1000;
  *int_vy = vy * 1000;
}

  char                found;
  TPixel              *grabber_frame;
  int                 top_frame = 0;
  TDataObj            current;
  TTracking           *track;
void init_tracking_task(void)
  {
    frameCAB = PXC_GetCab();

    grabber_frame = cab_getmes(frameCAB);

  // Executes first time
       found = scan_all_frame(&current, grabber_frame);
  if (found) {
    memcpy(&sequence[top_frame], &current, sizeof(TDataObj));
    top_frame++;
  }

  cab_unget(frameCAB, grabber_frame);
  }
/* Periodic tracking task.
 *
 * Each cycle: grabs the current frame from the frame CAB, locates the
 * object (window scan around the last centroid when it was visible,
 * full-frame scan otherwise), appends the sample to the `sequence` buffer,
 * predicts the position 100 ms ahead via tracking(), then reads/filters the
 * x position sensor and publishes the result on cab2.  Tracking results are
 * published on trackingCAB.
 *
 * NOTE(review): the one-shot initialisation that used to live here
 * (commented out) was moved to init_tracking_task() above.
 */
TASK tracking_task(void *arg)
{
  while (1) {
    // Grab the current frame and reserve a slot for the tracking results
    grabber_frame = (TPixel *)cab_getmes(frameCAB);
    track = (TTracking *)cab_reserve(trackingCAB);

    // Locate the object: scan only a window around the last centroid when
    // the object was visible in the previous cycle, the whole frame otherwise
    if (found) {
      found = scan_window_frame(&current, grabber_frame, current.xb, current.yb, img_border,img_border_y);
    } else {
      found = scan_all_frame(&current, grabber_frame);
    }

    track->found = found;

    if (found) {
      // Append the sample, wrapping to the start when the buffer is full
      if (top_frame < N_FRAMES) {
        memcpy(&sequence[top_frame], &current, sizeof(TDataObj));
        top_frame++;
      } else {
        top_frame = 0;
        memcpy(&sequence[top_frame], &current, sizeof(TDataObj));
      }

      track->top_frame = top_frame;
      memcpy(&track->current, &current, sizeof(TDataObj));

      // Prediction needs at least two samples
      if (top_frame > 1) {
        tracking(top_frame, &track->predx, &track->predy,
                            &track->vx, &track->vy, 100);
      }
    } else {
      track->top_frame = top_frame = 0;
    }

    // Acquisition from the x position sensor
    {
      float   y[2], yp[2],x3ist=0;
      long int k=0;
      char *pun2;
      struct Data_cab2 posiz;
      float yout1;

      // The A/D conversion must not be preempted
      task_nopreempt();
      yout1=ad_conv(11);
      task_preempt();

       {  // moving-average filtering of the x sensor reading
#define AVR 10
                  int i;
                  static float oyout1[AVR];
                  float avy;
                  static int index=0, flag=1;

                  // First run: preload the history with the current sample
                  if(flag==1) {
                    for(i=0; i<AVR;++i) { oyout1[i]=yout1;}
                    flag=0;
                  }
                  avy=0;
                  for(i=0;i<AVR;++i) { avy += oyout1[i]; }
                  avy /= AVR;
                  // Replace outliers (farther than prm.NOISE from the average)
                  if(fabs(yout1-avy)>=prm.NOISE) { yout1=avy;}
                  oyout1[index]=yout1;
                  index = (index+1) % AVR ;
                }

#ifndef CAMERA_X
                // x position and filtered derivative from the analog sensor
                x3ist=v2x(yout1);
                y[0]=bass1(x3ist);
                yp[0] = bass2(dx(y[0]));
                posiz.x = y[0] ; posiz.y = yp[0] ;
#else
                // x position derived from the camera bounding box instead
                posiz.x=bass1( -(LUNGH/2.0)+ ((float)(current.x1 + current.x2))*LUNGH/( 2.0 *N_COL)  );
                posiz.y=bass2(dx(posiz.x));
#endif
                pun2 = cab_reserve(cab2);
                memcpy(pun2, &posiz, sizeof(struct Data_cab2));
                cab_putmes(cab2, pun2);
    }

    // Release CABs
    cab_putmes(trackingCAB, (char *)track);
    cab_unget(frameCAB, grabber_frame);

    task_endcycle();
  }
}







/*
 *
 *
 *
 * Camera task
 *
 *
 *
 *
 */


/* Periodic display task.
 *
 * Each cycle: grabs the current frame, downscales it from N_COL x N_ROW to
 * IMG_COL x IMG_ROW through the grey palette, blits it on screen (when
 * PLOTIMG is defined) and overlays the tracking prediction (disc) and the
 * current bounding box (rectangle) read from trackingCAB.
 */
TASK camera_task(void *arg)
{
  register int        i,j,col,row;
  static unsigned int n_frame = 0;
  TPixel              *grabber_frame;
  TTracking           *track;

  // Task initialisation: fetch the frame CAB from the PXC driver
  frameCAB = PXC_GetCab();

  while (1) {
    n_frame++;

    /* Grab the current frame */
    grabber_frame = cab_getmes(frameCAB);

    // Nearest-neighbour downscaling through the grey palette
    for (i=0; i<IMG_ROW; i++)
      for (j=0; j<IMG_COL; j++) {
        col = (j*(N_COL-1))/(IMG_COL-1);
        row = (i*(N_ROW-1))/(IMG_ROW-1);
        converted_image[i*IMG_COL+j] = GRAY_palette[*(grabber_frame+row*N_COL+col)];
      }

    // Release CAB
    cab_unget(frameCAB, grabber_frame);

    // Paint the first and last row black to frame the picture
    for (j=0; j<IMG_COL; j++) {
      converted_image[j] = GRAY_palette[0];
      converted_image[(IMG_ROW-1)*IMG_COL+j] = GRAY_palette[0];
    }
    /* Blit the camera image on screen */
#ifdef PLOTIMG
    mutex_lock(&mutex);
    grx_putimage(IMG_X, IMG_Y, IMG_X+IMG_COL-1, IMG_Y+IMG_ROW-1,(BYTE *)converted_image);
    mutex_unlock(&mutex);
#endif

      mutex_lock(&mutex);
        /* Read the latest tracking results */
    track = (TTracking *)cab_getmes(trackingCAB);
        /* Draw the tracking overlay */
    if (track->found) {

      if (track->top_frame > 1) {
        int px, py;
        // Clamp the predicted position inside the scanned area
        if (track->predx < img_border)
          px = img_border;
        else if (track->predx > N_COL-img_border)
          px = N_COL-img_border;
        else
          px = track->predx;

        if (track->predy < img_border_y)
          py = img_border_y;
        else if (track->predy > N_ROW-img_border_y)
          py = N_ROW-img_border_y;
        else
          py = track->predy;
#ifdef PLOTIMG
        // Disc at the predicted position, rectangle on the bounding box
        // (coordinates rescaled from frame space to screen space)
        grx_disc(IMG_X+(px*IMG_COL)/N_COL, IMG_Y+(py*IMG_ROW)/N_ROW, 3, 127);
        grx_rect(IMG_X+(track->current.x1*IMG_COL)/N_COL, IMG_Y+(track->current.y1*IMG_ROW)/N_ROW, IMG_X+(track->current.x2*IMG_COL)/N_COL, IMG_Y+(track->current.y2*IMG_ROW)/N_ROW, 127);
#endif
      }
    }
    cab_unget(trackingCAB, (char *)track);
      mutex_unlock(&mutex);

    task_endcycle();
  }
}


/*
 *
 *
 *
 * Framegrabber Initialization
 *
 *
 *
 *
 */

void start_listener(void); //(TIME p);

/* Shutdown hook: releases the PXC frame grabber.  Registered via
 * sys_atrunlevel() in init_framegrabber() so it runs before system exit. */
void framegrabber_close(void *arg)
{
  PXC_Close();
}

/* Initialise the PXC frame grabber and the tracking support structures.
 *
 * Builds the 256-entry grey palette used to blit frames on screen, starts
 * the grabber, creates the tracking CAB (pre-loaded with found = 0 so
 * camera_task always reads valid data), runs init_tracking_task() and
 * registers the shutdown hook.  On grabber failure the program halts after
 * a key press.
 */
void init_framegrabber(void)
{
  int i;
  TIME period;

  // Build the greyscale palette
  for (i = 0; i < 256; i++)
    GRAY_palette[i] = rgb16(i,i,i);

  period = PXC_Initiate(4);

  if (!period) {
    // Grabber initialisation failed: report and shut down
    grx_close();
    cprintf("Problemi nell'inizializz. del framegrabber. premi un tasto \n");
    keyb_getchar();
    halt();
    sys_end();
  }
  else {
    TTracking *trdata;
    /* Create the tracking CAB and publish an initial "nothing found"
       message so readers never see uninitialised data */
    trackingCAB = cab_create("trackingCAB", sizeof(TTracking), 3);
    trdata = (TTracking *)cab_reserve(trackingCAB);
    trdata->found = 0;
    cab_putmes(trackingCAB, (char *)trdata);

    PXC_Start();

    init_tracking_task();
  }
  sys_atrunlevel(framegrabber_close, NULL, RUNLEVEL_BEFORE_EXIT);
}

/* crea i TASK  tracking_task e camera_task */

/*
void start_listener(void) //(TIME period)
{
  SOFT_TASK_MODEL m1, m2;
  HARD_TASK_MODEL m3;



#ifdef SOFTPROG
  soft_task_default_model(m1);
  soft_task_def_level(m1,1);
  soft_task_def_met(m1,WCET_TRACKING);
  soft_task_def_usemath(m1);

  soft_task_def_period(m1,(PERIOD_TRACKING));
  soft_task_def_group(m1,1);
  soft_task_def_ctrl_jet(m1);
//  soft_task_def_skip_arrivals(m1);
  tracking_PID = task_create("track", tracking_task, &m1, NULL);
  if (tracking_PID == -1) {
    sys_end();
    exit(4);
  }
#else
  hard_task_default_model(m3);
  //hard_task_def_level(m3,1);
  hard_task_def_wcet(m3,WCET_TRACKING);
  hard_task_def_mit(m3,(PERIOD_TRACKING));
  hard_task_def_usemath(m3);

  hard_task_def_group(m3,1);
  hard_task_def_ctrl_jet(m3);
  tracking_PID = task_create("track", tracking_task, &m3, NULL);
  if (tracking_PID == -1) {
    sys_end();
    exit(4);
  }
#endif


  soft_task_default_model(m2);
  soft_task_def_level(m2,1);
  soft_task_def_met(m2,WCET_CAMERA);
  soft_task_def_usemath(m2);

  soft_task_def_period(m2,PERIOD_CAMERA);
  soft_task_def_group(m2,1);
  soft_task_def_ctrl_jet(m2);

  camera_PID = task_create("cam", camera_task, &m2, NULL);
  if (camera_PID == -1) {
    sys_end();
    exit(4);
  }


}


*/