For Processing 2 users only [Field B Image Media: Image Processing]

Related links

・Support page for Information Media Fundamentals Unit 1

・Textbook


・Official site of the Processing language

・Processing manual

・Processing manual (older, but in Japanese)


・[Processing][Specification] How to install an external library (*.jar)
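As a rough guide when installing the zip files used in this unit (this describes the standard Processing 2 layout and assumes the library in kisoUnit1B.zip follows the usual naming; the jar names below are only illustrative): a contributed library is unpacked into the sketchbook's libraries folder, while a stand-alone *.jar can also be dropped into a code folder inside the sketch itself.

  <sketchbook>/libraries/kisoUnit1B/library/kisoUnit1B.jar
  <sketch folder>/code/external.jar

Restart the Processing IDE after copying the files so the new library is picked up.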

Week 1

Download data

・kisoUnit1B.zip

Programs

CameraListup.pde
import processing.video.*;
	
void setup() {
  size(640, 480);
	
  String[] cameras = Capture.list();
	
  if (cameras.length == 0) {
    println("There are no Cameras available for capture.");
    exit();
  } else {
    println("Available cameras");
    for (int i = 0; i < cameras.length; i++) {
      println("[" + i + "] " + cameras[i]);
    }
  }
}
	
void draw() {}
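Run this sketch first and note the index printed next to the camera (and resolution) you want to use. The sketches below create their capture objects with new Capture(this, cameras[1]); if index 1 does not exist or points at the wrong device on your machine, change that index to one shown in this list.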

Unit1B_w1_1.pde
import kisoUnit1B.*;
import processing.video.*;
	
Capture cap;
RGBHistogram hist;
	
int img_width = 640;
int img_height = 480;
	
boolean flagS = false;
	
void setup() {
  hist = new RGBHistogram(this);
  size(hist.getGraphAreaWidth(), hist.getGraphAreaHeight() + 480);
	
  String[] cameras = Capture.list();
  if (cameras.length == 0) {
    println("There are no Cameras available for capture.");
    exit();
    return;  // exit() does not stop setup() immediately, so bail out before cameras[1] is accessed below
  } else {
    println("Available cameras");
    for (int i = 0; i < cameras.length; i++) {
      println("[" + i + "] " + cameras[i]);
    }
  }
	
  cap = new Capture(this, cameras[1]);  // camera index 1; adjust to match the list printed above
  cap.start();
}
	
void keyPressed() {
  if (key == 's') {
    flagS = !flagS;   // toggle the paused state
  } else if (key == 'q') {
    exit();
  }
}
	
void draw() {
  if (cap.available()) {
    cap.read();
  }
	
  if (flagS == false) {
    background(255, 255, 255);
    set(0, 0, cap);
    hist.loadImage(get(0, 0, img_width, img_height));
    hist.drawGraph(0, 480, 5.0);
  }
}
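In this sketch each captured frame is copied to the window with set(0, 0, cap), read back with get(0, 0, img_width, img_height), and handed to the RGBHistogram object from the kisoUnit1B library; drawGraph(0, 480, 5.0) then draws the histogram below the camera image (the third argument appears to be a vertical scale factor, a detail of the course library rather than something documented here). Pressing 's' freezes the display and 'q' quits.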

Extra

A program that computes the histogram of a given image and writes it out to CSV files, without using any library.

batch1.pde
PImage img;
int w;
int h;
int[] redH;
int[] greenH;
int[] blueH;
	
void setup() {
  img = loadImage("target.png");
  w = img.width;
  h = img.height;
  size(w, h);
	
  redH = new int[256];
  greenH = new int[256];
  blueH = new int[256];
	
  img.loadPixels();  // make img.pixels[] available before reading it below
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      int redV = (int)(red(img.pixels[y*w + x]));
      int greenV = (int)(green(img.pixels[y*w + x]));
      int blueV = (int)(blue(img.pixels[y*w + x]));

      redH[redV]++;
      greenH[greenV]++;
      blueH[blueV]++;
    }
  }
	
  String[] s_redH = new String[256];
  String[] s_greenH = new String[256];
  String[] s_blueH = new String[256];
	
  for (int i = 0; i < 256; i++) {
    s_redH[i] = i + "," + redH[i];
    s_greenH[i] = i + "," + greenH[i];
    s_blueH[i] = i + "," + blueH[i];
  }
  
  saveStrings("redHist.csv", s_redH);
  saveStrings("greenHist.csv", s_greenH);
  saveStrings("blueHist.csv", s_blueH);
  
  println("Save files!!");
}
	
void draw() {
  set(0, 0, img);
}
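Each output file contains 256 lines of the form value,count, one line per intensity level from 0 to 255, so the histograms can be opened directly in a spreadsheet or plotted with gnuplot or a similar tool.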

Week 2

Download data

・opencv_processing.zip

Sample images


beer.jpg


test.bmp

Programs

Unit1B_w2_1.pde
import gab.opencv.*;
	
OpenCV opencv;
PImage img, gray, f1;
double[] filter1 = {   // 3x3 convolution kernel; this identity kernel leaves the image unchanged
  0.0, 0.0, 0.0, 
  0.0, 1.0, 0.0, 
  0.0, 0.0, 0.0
};
	
void setup() {
  img = loadImage("beer.jpg");
  size(img.width, img.height);
	
  opencv = new OpenCV(this, img);
  gray = opencv.getSnapshot();     // grayscale copy of the working image

  opencv.filter2D(3, 3, filter1);  // convolve the working image with the 3x3 kernel
  f1 = opencv.getSnapshot();       // filtered result
	
  PFont font = createFont("MS-PMincho", 12);
  textFont(font);
}
	
void draw() {
  background(0);
	
  pushMatrix();
  scale(0.5);
  image(img, 0, 0);  
  image(gray, img.width, 0);
  image(f1, 0, img.height);
  popMatrix();
	
  String s1 = "元画像";
  String s2 = "グレースケール";
  String s3 = "フィルタ処理結果";
	
  fill(255, 0, 0);
  text(s1, img.width/2 - (textWidth(s1) + 10), 20 );
  text(s2, img.width - (textWidth(s2) + 10), 20 );
  text(s3, img.width/2 - (textWidth(s3) + 10), img.height/2 + 20 );
}
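The identity kernel above reproduces the grayscale image unchanged, which makes it easy to confirm that filter2D() is wired up correctly. As a next step you can experiment with other 3x3 coefficient sets. The two kernels below are standard textbook examples (an averaging blur and a Laplacian-style edge detector), written on the assumption that the course's filter2D(rows, cols, coefficients) takes the coefficients in the same row-major order as filter1:

// Averaging (box-blur) kernel: each output pixel becomes the mean of its 3x3 neighborhood
double[] blurKernel = {
  1/9.0, 1/9.0, 1/9.0,
  1/9.0, 1/9.0, 1/9.0,
  1/9.0, 1/9.0, 1/9.0
};

// Laplacian-style kernel: flat regions go to zero, edges are emphasized
double[] edgeKernel = {
   0.0, -1.0,  0.0,
  -1.0,  4.0, -1.0,
   0.0, -1.0,  0.0
};

// e.g. in setup(), after taking the grayscale snapshot:
// opencv.filter2D(3, 3, blurKernel);
// f1 = opencv.getSnapshot();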

Unit1B_w2_2.pde
import gab.opencv.*;
import processing.video.*;
	
Capture cap;
OpenCV opencv;
	
int img_width = 640;
int img_height = 480;
	
PImage  img, gray, bin;
boolean flagS = false;
	
void setup() {
  size(img_width, img_height);
	
  String[] cameras = Capture.list();
	
  if (cameras.length == 0) {
    println("There are no cameras available for capture.");
    exit();
  } else {
    println("Available cameras:");
    for (int i = 0; i < cameras.length; i++) {
      println("[" + i + "] " + cameras[i]);
    }
	
    cap = new Capture(this, cameras[1]);
    cap.start();
    opencv = new OpenCV(this, img_width, img_height);
	
    PFont font = createFont("MS-PMincho", 12);
    textFont(font);
  }
}
	
void keyPressed() {
  if (key == 's') {
    flagS = !flagS;   // toggle pause
  }
}
	
void draw() {
  if (flagS == true) {
    return;
  }
	
  if (cap.available()) 
  {
    cap.read();
  }
	
  set(0, 0, cap);
  img = get(0, 0, img_width, img_height);
	
  opencv.loadImage(img);
  gray = opencv.getSnapshot();   // grayscale copy of the current frame

  opencv.threshold(128);         // binarize: pixels brighter than 128 become white, the rest black
  bin = opencv.getSnapshot();
	
  background(0);
  pushMatrix();
  scale(0.5);
  image(img, 0, 0);  
  image(gray, img.width, 0);
  image(bin, 0, img.height);
  popMatrix();
	
  String s1 = "元画像";
  String s2 = "グレースケール";
  String s3 = "二値画像";
	
  fill(255, 0, 0);
  text(s1, img.width/2 - (textWidth(s1) + 10), 20 );
  text(s2, img.width - (textWidth(s2) + 10), 20 );
  text(s3, img.width/2 - (textWidth(s3) + 10), img.height/2 + 20 );
}
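opencv.threshold(128) does the binarization in one call; if you want to see what it is doing, the helper below (an illustrative sketch using only Processing's own pixel access, not part of the course code) produces a comparable black-and-white image from any PImage:

// Binarize a PImage by brightness; thresh plays the same role as the 128
// passed to opencv.threshold() above.
PImage binarize(PImage src, int thresh) {
  PImage dst = createImage(src.width, src.height, RGB);
  src.loadPixels();
  dst.loadPixels();
  for (int i = 0; i < src.pixels.length; i++) {
    // brightness() returns 0-255 in the default color mode
    dst.pixels[i] = (brightness(src.pixels[i]) >= thresh) ? color(255) : color(0);
  }
  dst.updatePixels();
  return dst;
}

// usage inside draw(), instead of the OpenCV calls:
// bin = binarize(gray, 128);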

Unit1B_w2_3.pde
import gab.opencv.*;
import processing.video.*;
	
Capture cap;
OpenCV opencv;
	
int img_width = 640;
int img_height = 480;
	
PImage  img, gray1, gray2, diff;
boolean isFirst = true;
boolean flagS = false;
	
void setup() {
  size(img_width, img_height);
	
  String[] cameras = Capture.list();
	
  if (cameras.length == 0) {
    println("There are no cameras available for capture.");
    exit();
  } else {
    println("Available cameras:");
    for (int i = 0; i < cameras.length; i++) {
      println("[" + i + "] " + cameras[i]);
    }
	
    cap = new Capture(this, cameras[1]);
    cap.start();
    opencv = new OpenCV(this, img_width, img_height);
	
    PFont font = createFont("MS-PMincho", 12);
    textFont(font);
  }
}
	
void keyPressed() {
  if (key == 's') {
    flagS = !flagS;     // toggle pause
    if (!flagS) {
      isFirst = true;   // when resuming, treat the next frame as the first one again
    }
  }
}
	
void draw() {
  if (flagS == true) {
    return;
  }
	
  if (cap.available()) 
  {
    cap.read();
  }
	
  set(0, 0, cap);
  img = get(0, 0, img_width, img_height);
	
  opencv.loadImage(img);
  gray2 = opencv.getSnapshot();
	
  background(0);
	
  if (isFirst == false) {
    opencv.diff(gray1);           // absolute difference between the previous frame (gray1) and the current one
    diff = opencv.getSnapshot();
	
    pushMatrix();
    scale(0.5);
    image(gray1, 0, 0);  
    image(gray2, img.width, 0);
    image(diff, 0, img.height);
    popMatrix();
	
    String s1 = "時刻 t のグレースケール";
    String s2 = "時刻 t+Δt のグレースケール";
    String s3 = "差分画像";
	
    fill(255, 0, 0);
    text(s1, img.width/2 - (textWidth(s1) + 10), 20 );
    text(s2, img.width - (textWidth(s2) + 10), 20 );
    text(s3, img.width/2 - (textWidth(s3) + 10), img.height/2 + 20 );
  }
	
  isFirst = false;
  gray1 = gray2;   // keep the current frame as the "previous" frame for the next comparison

  delay(100);      // wait 100 ms so that consecutive frames are clearly separated in time
}
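A common next step, not shown in the course code, is to turn the difference image into a single number, for example by counting how many pixels changed noticeably between the two frames; the helper below is one way to sketch that idea:

// Count the pixels in the difference image whose brightness exceeds a threshold;
// a large count suggests that something moved between the two frames.
int countChangedPixels(PImage diffImg, int thresh) {
  diffImg.loadPixels();
  int n = 0;
  for (int i = 0; i < diffImg.pixels.length; i++) {
    if (brightness(diffImg.pixels[i]) > thresh) {
      n++;
    }
  }
  return n;
}

// usage inside draw(), after diff = opencv.getSnapshot():
// println("changed pixels: " + countChangedPixels(diff, 30));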

Hint

BGImgSave.pde
import processing.video.*;
	
Capture cap;
	
int img_width = 640;
int img_height = 480;
	
PImage  img;
boolean flagS = false;
	
void setup() {
  size(img_width, img_height);
	
  String[] cameras = Capture.list();
	
  if (cameras.length == 0) {
    println("There are no cameras available for capture.");
    exit();
  } else {
    println("Available cameras:");
    for (int i = 0; i < cameras.length; i++) {
      println("[" + i + "] " + cameras[i]);
    }
	
    cap = new Capture(this, cameras[1]);
    cap.start();
  }
}
	
void keyPressed() {
  if (key == ' ') {
    if (flagS == false && img != null) {  // ignore the key until at least one frame has been captured
      flagS = true;
      img.save("BG.png");
    }
  }
}
	
void draw() {
  if (flagS == true) {
    return;
  }
	
  if (cap.available()) 
  {
    cap.read();
  }
	
  set(0, 0, cap);
  img = get(0, 0, img_width, img_height);
}