using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Xml.Schema;
using Point = OpenCvSharp.Point;
using Size = OpenCvSharp.Size;
namespace TargetMeasureObject
{
class NormalProcessor : Object
{
// --- Warning-panel configuration -------------------------------------------
// Fixed pixel dimensions of the panel created by ImageWarningShow.
private const int WARNING_PANEL_WIDTH = 500;
private const int WARNING_PANEL_HEIGHT = 300;
// Panel colors: black background with red warning text.
private static Scalar WARNING_PANEL_BACKGROUND = Scalar.Black;
private static Scalar WARNING_PANEL_FOREGROUND = Scalar.Red;
// Mat that ImageWarningShow (re)creates and draws the warning message onto.
private static Mat WarnShowPanel = new Mat();
// Backing stores for the most recently loaded image (written by ImageRead).
private static Mat _sourceImage;
private static Mat _sourceGrayImage;
// Shared window title; empty until assigned by external callers.
public static string WINDOW_NAME = string.Empty;
// Grayscale version of the image last loaded by ImageRead.
public static Mat SourceGrayImage
{
get {
return _sourceGrayImage; }
set {
_sourceGrayImage = value; }
}
// Color (BGR) version of the image last loaded by ImageRead.
public static Mat SourceImage
{
get {
return _sourceImage; }
set {
_sourceImage = value; }
}
internal class Pair : ProcesserOutData.Pair
{
    /// <summary>
    /// Convenience wrapper over the base pair type: holds two arbitrary
    /// objects in M1/M2.
    /// </summary>
    public Pair() : this(null, null)
    {
    }
    // Builds a pair from the two given objects.
    public Pair(object _m1, object _m2)
    {
        M1 = _m1;
        M2 = _m2;
    }
}
// Thin local aliases over the ProcesserOutData result types so the rest of
// this file can refer to them by short names; they add no members of their own.
internal class ImageHistMatchResult : ProcesserOutData.ImageHistgramMatchResult {
}
internal class ImageGeometricData : ProcesserOutData.ImageGeometricData {
}
internal class ImageGeometricScore : ProcesserOutData.ImageGeometricScore {
}
internal class ImageGeometricMatchResult : ProcesserOutData.ImageGeometricMatchResult {
}
internal class ImageFeatureData : ProcesserOutData.ImageFeatureData {
}
internal class ImageFeatureScore : ProcesserOutData.ImageFeatureScore {
}
internal class ImageConnectedFieldData : ProcesserOutData.ImageConnectedFieldData {
}
internal class ImageEdgePtInform : ProcesserOutData.ImageEdgePtInform {
}
internal class PixelFeature : ProcesserOutData.PixelFeature
{
    /// <summary>
    /// Per-pixel feature record: center pixel value, neighborhood average,
    /// pixel position, and positive/negative pixel differences.
    /// </summary>
    public PixelFeature(int cPixel,int aPixel,Point pos,int posDiff,int negDiff )
    {
        CenPixel = cPixel;
        AvrPixel = aPixel;
        Position = pos;
        PosPixDiff = posDiff;
        NegPixDiff = negDiff;
    }
    // Variant without a negative difference; NegPixDiff keeps its default value.
    public PixelFeature(int cPixel, int aPixel, Point pos, int posDiff)
    {
        CenPixel = cPixel;
        AvrPixel = aPixel;
        Position = pos;
        PosPixDiff = posDiff;
    }
    // Zero-initialized feature located at the origin.
    public PixelFeature() : this(0, 0, new Point(0, 0), 0, 0)
    {
    }
}
internal class ImageBlobData : ProcesserOutData.ImageBlobData
{
    /// <summary>
    /// Blob-level histogram data holder; all state lives in the base type.
    /// </summary>
    public ImageBlobData()
    {
    }
    // Stores the raw and normalized per-channel histograms.
    public ImageBlobData(Mat[] initHist, Mat[] normHist)
    {
        HistgramInitialDatas = initHist;
        HistgramNormalizedDatas = normHist;
    }
    // Also records the histogram peak pixel values and their ratios.
    public ImageBlobData(Mat[] initHist, Mat[] normHist,int[] pixel,double[] ratio) : this(initHist, normHist)
    {
        HistgramPeakPixel = pixel;
        HistgramPeakRatio = ratio;
    }
}
/// <summary>
/// Displays an image in a named window, optionally resized by a zoom factor,
/// then waits for a key press (Enter/Escape closes all windows).
/// </summary>
/// <param name="_winName">Window title; must be non-empty.</param>
/// <param name="_img">Image to show; must be non-null.</param>
/// <param name="_zoomScale">Display zoom factor; 1 means no resizing.</param>
public static void ImageShow(string _winName,Mat _img,double _zoomScale=1)
{
    // Fix: the original cloned _img BEFORE the guard (throwing on a null
    // image) and combined the checks with &&, so the guard almost never
    // fired. Validate both arguments first and reject if either is bad.
    if (_img is null || string.IsNullOrEmpty(_winName))
    {
        return;
    }
    Mat _uImg = _img.Clone();
    if (_zoomScale != 1)
    {
        Cv2.Resize(_uImg, _uImg, new Size(0, 0), fx: _zoomScale, fy: _zoomScale, InterpolationFlags.Nearest);
    }
    Cv2.ImShow(_winName, _uImg);
    KeyDownProcess();
}
// Placeholder for image persistence; intentionally empty — not implemented yet.
public static void ImageStorage()
{
}
/// <summary>
/// Loads an image from disk into both the color and grayscale caches
/// (SourceImage / SourceGrayImage) and returns the requested variant.
/// </summary>
/// <param name="_filePath">Path of the image file; null/empty returns null.</param>
/// <param name="gray">True to return the grayscale variant, false for color.</param>
/// <returns>The loaded image, or null when the path is null or empty.</returns>
public static Mat ImageRead(string _filePath, bool gray = false)
{
    if (string.IsNullOrEmpty(_filePath))
    {
        return null;
    }
    // Fix: the original allocated two empty Mats here and immediately
    // overwrote them with the ImRead results, leaking the native buffers.
    _sourceImage = Cv2.ImRead(_filePath);
    _sourceGrayImage = Cv2.ImRead(_filePath, ImreadModes.Grayscale);
    return gray ? SourceGrayImage : SourceImage;
}
/// <summary>
/// Blocks for a single key press and closes all OpenCV windows when the key
/// is Enter or Escape.
/// </summary>
public static void KeyDownProcess()
{
    // Fix: the original called Cv2.WaitKey() twice via the short-circuit ||,
    // which blocked for a SECOND key press whenever the first was not Enter.
    // Read the key exactly once and compare it against both codes.
    int key = Cv2.WaitKey();
    if (key == (int)ConsoleKey.Enter || key == (int)ConsoleKey.Escape)
    {
        Cv2.DestroyAllWindows();
    }
}
/// <summary>
/// Draws a single text string onto a copy of the image. Single-channel
/// inputs are expanded to three channels first so colored text is possible.
/// </summary>
static public Mat ImageAnnotation(Mat img, string texts, Point position, Scalar fontColor, double fontScale = 0.5,bool show=false)
{
    Mat canvas = img.Clone();
    if (canvas.Type() == MatType.CV_8UC1)
    {
        Cv2.Merge(new Mat[] { canvas, canvas, canvas }, canvas);
    }
    Cv2.PutText(canvas, texts, position, HersheyFonts.HersheySimplex, fontScale, fontColor);
    return canvas;
}
/// <summary>
/// Writes each non-empty string down the left edge of a copy of the image,
/// one line per fontInterval pixels, in white.
/// </summary>
static public Mat ImageAnnotation(Mat img, string[] texts,double fontSize=0.5,int fontInterval= 20)
{
    Mat canvas = img.Clone();
    int row = 0;
    foreach (string line in texts)
    {
        row++;
        if (string.IsNullOrEmpty(line))
        {
            continue;   // keep the vertical slot but draw nothing
        }
        Cv2.PutText(canvas, line, new Point(0, fontInterval * row), HersheyFonts.HersheySimplex, fontSize, Scalar.White);
    }
    return canvas;
}
/// <summary>
/// Draws a block of text lines at each anchor point: texts[i] is the list of
/// lines rendered at points[i], stacked textDist pixels apart, in white.
/// </summary>
static public Mat ImageAnnotation(Mat img, List<List<string>> texts, List<Point> points, int textDist = 20)
{
    const double fontSize = 0.5;
    Mat canvas = img.Clone();
    for (int anchor = 0; anchor < points.Count; anchor++)
    {
        Point basePos = points[anchor];
        List<string> lines = texts[anchor];
        for (int lineIdx = 0; lineIdx < lines.Count; lineIdx++)
        {
            Point drawPos = new Point(basePos.X, basePos.Y + textDist * lineIdx);
            Cv2.PutText(canvas, lines[lineIdx], drawPos, HersheyFonts.HersheySimplex, fontSize, Scalar.White);
        }
    }
    return canvas;
}
/// <summary>
/// Resizes both images to a common size — the larger of the two by default,
/// the smaller when cSmall is true — then returns them merged side by side.
/// When origin is true the resized copies are written back through the ref
/// parameters; otherwise the inputs are left untouched.
/// </summary>
static public Mat ImageCoupleStretch(ref Mat img1, ref Mat img2, bool origin = false, bool cSmall = false, bool _show = false)
{
    Mat left = img1.Clone();
    Mat right = img2.Clone();
    int area1 = left.Rows * left.Cols;
    int area2 = right.Rows * right.Cols;
    // Pick the reference size by pixel area.
    bool firstWins = cSmall ? area1 < area2 : area1 > area2;
    Size target = firstWins ? left.Size() : right.Size();
    Cv2.Resize(right, right, target, interpolation: InterpolationFlags.Nearest);
    Cv2.Resize(left, left, target, interpolation: InterpolationFlags.Nearest);
    if (origin)
    {
        img1 = left;
        img2 = right;
    }
    return ImagesMerge(new Mat[] { left }, right, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL, ENUMS.PLACE_ORIENTATION.LEFT, show: _show);
}
/// <summary>
/// Merges several sub-images into one strip (horizontal or vertical per
/// permType) and attaches that strip to mainImg on the side given by
/// placeOiren. Invalid (null/empty) images are skipped; if everything is
/// invalid an empty Mat is returned, and if only the sub-images are invalid
/// a clone of mainImg is returned.
/// </summary>
static public Mat ImagesMerge(Mat[] _subImg, Mat mainImg, ENUMS.IMAGE_PERMUTATION_TYPE permType=ENUMS.IMAGE_PERMUTATION_TYPE.VERTICAL, ENUMS.PLACE_ORIENTATION placeOiren = ENUMS.PLACE_ORIENTATION.LEFT, bool show = false,double showZoomRatio = 1)
{
// Work on clones so callers' Mats are never modified (the lambda below
// converts types in place).
Mat[] subImg = new Mat[_subImg.Length];
for (int i = 0; i < _subImg.Length; i++)
{
subImg[i] = _subImg[i].Clone(); ;
}
// Count sub-images that are null or have zero area.
int inValidSubNum = 0;
for(int i = 0; i < subImg.Length; i++)
{
if(subImg[i]==null || subImg[i].Rows * subImg[i].Cols == 0)
{
inValidSubNum++;
}
}
bool mainInVlid = (mainImg == null || mainImg.Rows * mainImg.Cols == 0);
bool subInVlid = (inValidSubNum == subImg.Length);
Mat _outImg = new Mat();
Size _outSize = new Size();
MatType _outType = new MatType();
Mat _attachImg = new Mat();
// Concatenates the valid images in order on a black canvas sized to hold
// them all (max extent in one axis, summed extent in the other), converting
// each image to the requested type first.
Func<Mat[], ENUMS.IMAGE_PERMUTATION_TYPE, MatType, Mat> attachImageMerge = (imgs, perm, type) =>
{
Size size = new Size();
if (perm == ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL)
{
// Tallest image fixes the height; widths accumulate.
size.Height = imgs.ToList().OrderByDescending(img => img.Rows).First().Rows;
for (int i = 0; i < imgs.Length; i++) size.Width += imgs[i].Cols;
}
else
{
// Widest image fixes the width; heights accumulate.
size.Width = imgs.ToList().OrderByDescending(img => img.Cols).First().Cols;
for (int i = 0; i < imgs.Length; i++) size.Height += imgs[i].Rows;
}
Mat o = new Mat(size, type, Scalar.Black);
Point placePos = new Point(0, 0);
for (int i = 0; i < imgs.Length; i++)
{
if (imgs[i] != null && imgs[i].Width * imgs[i].Height != 0)
{
if (imgs[i].Type() != type)
{
if (type == MatType.CV_8UC1)
{
imgs[i].ConvertTo(imgs[i], type);
}
else
{
// NOTE(review): assumes a type mismatch means the image is
// grayscale needing 3 channels — confirm for other types.
Cv2.CvtColor(imgs[i], imgs[i], ColorConversionCodes.GRAY2RGB);
}
}
// Paste into the canvas, then advance the cursor along the merge axis.
o[new Rect(placePos, imgs[i].Size())] = imgs[i];
placePos.X += (perm == ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL) ? imgs[i].Cols : 0;
placePos.Y += (perm == ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL) ? 0 : imgs[i].Rows;
}
}
return o;
};
if (mainInVlid && subInVlid)
{
// Nothing usable: empty result.
return new Mat();
}
else if(!mainInVlid && subInVlid)
{
// No valid sub-images: just return a copy of the main image.
return mainImg.Clone();
}
else if (mainInVlid && !subInVlid)
{
// Only the sub-images are valid: the merged strip IS the result.
_outType = mainInVlid ? MatType.CV_8UC3 : mainImg.Type();
_attachImg = attachImageMerge(subImg, permType, _outType);
_outImg = _attachImg.Clone();
}
else if (!mainInVlid && !subInVlid)
{
// Both valid: build the strip in the main image's type, then place the
// strip and the main image on a shared canvas.
_outType = mainInVlid ? MatType.CV_8UC3 : mainImg.Type();
_attachImg = attachImageMerge(subImg, permType, _outType);
Mat _mainImg = mainImg.Clone();
// Orientation values <= 1 (UP/DOWN) stack vertically; others side by side.
_outSize = ((int)placeOiren <= 1) ?
new Size(Math.Max(_mainImg.Cols, _attachImg.Cols), (int)(_mainImg.Rows + _attachImg.Rows)) :
new Size((int)(_mainImg.Cols + _attachImg.Cols), Math.Max(_mainImg.Rows, _attachImg.Rows));
_outImg = new Mat(_outSize, _outType, Scalar.Black);
Point _attachPos = new Point();
Point _mainPos = new Point();
switch (placeOiren)
{
case ENUMS.PLACE_ORIENTATION.UP:
{
_attachPos = new Point(0, 0);
_mainPos = new Point(0, _attachImg.Rows);
break;
}
case ENUMS.PLACE_ORIENTATION.DOWN:
{
_attachPos = new Point(0, _mainImg.Rows);
_mainPos = new Point(0, 0);
break;
}
case ENUMS.PLACE_ORIENTATION.LEFT:
{
_attachPos = new Point(0, 0);
_mainPos = new Point(_attachImg.Cols, 0);
break;
}
case ENUMS.PLACE_ORIENTATION.RIGHT:
{
_attachPos = new Point(_mainImg.Cols, 0);
_mainPos = new Point(0, 0);
break;
}
}
_outImg[new Rect(_attachPos, _attachImg.Size())] = _attachImg;
_outImg[new Rect(_mainPos, _mainImg.Size())] = _mainImg;
}
if (show)
{
Mat _showImg = _outImg.Clone();
if(showZoomRatio!=1)
Cv2.Resize(_showImg, _showImg, new Size(0, 0), fx: showZoomRatio, fy: showZoomRatio);
ImageShow("JOINED IMAGE SHOWING", _showImg);
}
return _outImg;
}
/// <summary>
/// Connects consecutive points with line segments on a copy of the image.
/// </summary>
static public Mat ImageCurveDrawing(Mat img, Point[] points, Scalar curColor, int curThick, bool show = false)
{
    Mat canvas = img.Clone();
    for (int idx = 0; idx + 1 < points.Length; idx++)
    {
        Cv2.Line(canvas, points[idx], points[idx + 1], curColor, thickness: curThick);
    }
    if (show)
    {
        ImageShow("ImageCurveDrawing", canvas);
    }
    return canvas;
}
/// <summary>
/// Draws a full-width horizontal or full-height vertical guide line through
/// the given point. Grayscale images are expanded to three channels so the
/// light-green line color is visible.
/// </summary>
static public Mat ImageBasicLineDrawing(Mat img,Point point,ENUMS.IMAGE_PERMUTATION_TYPE orientation)
{
    Mat canvas = img.Clone();
    bool horizontal = orientation == ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL;
    Point start = horizontal ? new Point(0, point.Y) : new Point(point.X, 0);
    Point end = horizontal ? new Point(canvas.Width, point.Y) : new Point(point.X, canvas.Height);
    if (canvas.Type() == MatType.CV_8UC1)
    {
        Cv2.Merge(new Mat[] { canvas, canvas, canvas }, canvas);
    }
    Cv2.Line(canvas, start, end, Scalar.LightGreen, 1);
    return canvas;
}
/// <summary>
/// Marks a position with a small cross: one horizontal and one vertical
/// segment of the given total size, centered on pos.
/// </summary>
static public Mat ImagePointCrossLineDrawing(Mat img,Point pos,int size,Scalar color)
{
    Mat canvas = img.Clone();
    int half = size / 2;
    // The original's ptu/ptd names were misleading — these are the
    // horizontal endpoints; vStart/vEnd are the vertical ones.
    Point hStart = new Point(pos.X - half, pos.Y);
    Point hEnd = new Point(pos.X + half, pos.Y);
    Point vStart = new Point(pos.X, pos.Y - half);
    Point vEnd = new Point(pos.X, pos.Y + half);
    Cv2.Line(canvas, hStart, hEnd, color, 1);
    Cv2.Line(canvas, vStart, vEnd, color, 1);
    return canvas;
}
/// <summary>
/// Applies a pseudo-color map (default: colormap id 9) to a copy of the
/// image and optionally displays the result.
/// </summary>
static public Mat ImageGrayToColorfull(Mat img,ColormapTypes mapType=(ColormapTypes)9,bool _show=false)
{
    Mat colored = new Mat();
    Cv2.ApplyColorMap(img.Clone(), colored, mapType);
    if (_show)
    {
        ImageShow("ImageGrayToColorfull", colored);
    }
    return colored;
}
/// <summary>
/// Renders the warning text in red on a fixed-size black panel, stores the
/// panel in WarnShowPanel, and displays it.
/// </summary>
static public void ImageWarningShow(string warningContent)
{
    Mat panel = new Mat(new Size(WARNING_PANEL_WIDTH, WARNING_PANEL_HEIGHT), MatType.CV_8UC3, WARNING_PANEL_BACKGROUND);
    panel.PutText(warningContent, new Point(30, 30), HersheyFonts.HersheySimplex, 1, WARNING_PANEL_FOREGROUND, 2);
    WarnShowPanel = panel;
    ImageShow("WarningMessage", panel);
}
/// <summary>
/// Returns a single-channel copy of the image, converting BGR to grayscale
/// when the input has more than one channel.
/// </summary>
static public Mat ImageGrayDetect(Mat img)
{
    Mat copy = img.Clone();
    if (copy.Channels() == 1)
    {
        return copy;
    }
    Cv2.CvtColor(copy, copy, ColorConversionCodes.BGR2GRAY);
    return copy;
}
/// <summary>
/// Extracts the given ROI rectangles from a clone of the image into
/// outSubImgs. On invalid arguments a warning panel is shown and
/// outSubImgs is left as a single-null array.
/// </summary>
/// <param name="img">Source image; warned and rejected when null.</param>
/// <param name="subRects">ROI rectangles; warned and rejected when null.</param>
/// <param name="outSubImgs">Receives one Mat view per rectangle.</param>
/// <param name="show">Unused; kept for interface compatibility.</param>
static public void ImageRoiRegionGet(Mat img,Rect[] subRects,out Mat[] outSubImgs,bool show=false)
{
    outSubImgs = new Mat[] { null };
    if (img is null)
    {
        ImageWarningShow("ImageRoiRegionGetError:Args :img is null");
        return;
    }
    // Fix: the original dereferenced subRects unchecked and threw
    // NullReferenceException on a null rectangle array.
    if (subRects is null)
    {
        ImageWarningShow("ImageRoiRegionGetError:Args :subRects is null");
        return;
    }
    Mat _uImg = img.Clone();
    outSubImgs = new Mat[subRects.Length];
    for (int i = 0; i < subRects.Length; i++)
    {
        // Mat(Mat, Rect) creates a view sharing pixel data with the clone.
        outSubImgs[i] = new Mat(_uImg, subRects[i]);
    }
}
/// <summary>
/// Collects the M2 members of each pair (cast to Mat) into an array.
/// </summary>
/// <param name="pairs">List of pairs; null or empty yields null.</param>
/// <returns>Array of the pairs' M2 Mats, or null when there are none.</returns>
static public Mat[] PairsMatQueues(List<Pair> pairs)
{
    // Fix: a null list used to throw NullReferenceException on pairs.Count;
    // treat it like an empty list and return null, matching the contract.
    if (pairs == null || pairs.Count == 0)
    {
        return null;
    }
    Mat[] result = new Mat[pairs.Count];
    for (int i = 0; i < pairs.Count; i++)
    {
        result[i] = (Mat)pairs[i].M2;
    }
    return result;
}
/// <summary>
/// Builds a palette mapping each pattern index 0.._patterns-1 to a random
/// color. A non-positive count yields an empty dictionary.
/// </summary>
static public Dictionary<int,Scalar> MatchColorMaker(int _patterns)
{
    Dictionary<int, Scalar> palette = new Dictionary<int, Scalar>();
    int index = 0;
    while (index < _patterns)
    {
        palette.Add(index, Scalar.RandomColor());
        index++;
    }
    return palette;
}
/// <summary>
/// Rotates (and optionally scales) the image about roCenter, enlarging the
/// output canvas to the rotated bounding box so the result is not clipped.
/// </summary>
/// <param name="img">Source image.</param>
/// <param name="roCenter">Rotation center in source coordinates.</param>
/// <param name="roAngle">Rotation angle in degrees (counter-clockwise).</param>
/// <param name="scale">Isotropic scale factor applied with the rotation.</param>
/// <param name="show">When true, displays the rotated result.</param>
/// <returns>The rotated image on its enlarged canvas.</returns>
static public Mat ImageRotate(Mat img,Point2f roCenter,double roAngle,double scale=1, bool show=false)
{
    Mat rotated = img.Clone();
    // Fix: the original hard-coded 1 here, silently ignoring the scale
    // parameter the signature advertises.
    Mat M = Cv2.GetRotationMatrix2D(roCenter, roAngle, scale);
    var cos = Math.Abs(M.Get<double>(0, 0));
    var sin = Math.Abs(M.Get<double>(0, 1));
    // Bounding box of the rotated image.
    int outWidth = (int)(rotated.Width * cos + rotated.Height * sin);
    int outHeight = (int)(rotated.Width * sin + rotated.Height * cos);
    // Shift the transform so the rotation center maps to the canvas center.
    double offX = M.Get<double>(0, 2) + (outWidth / 2) - roCenter.X;
    double offY = M.Get<double>(1, 2) + (outHeight / 2) - roCenter.Y;
    M.Set<double>(0, 2, offX);
    M.Set<double>(1, 2, offY);
    Cv2.WarpAffine(rotated, rotated, M, new Size(outWidth, outHeight));
    if (show)
    {
        ImageShow("ImageRotateShowing", rotated);
    }
    return rotated;
}
/// <summary>
/// Computes an image sharpness (focus) score.
/// </summary>
/// <param name="img">Input image (presumably grayscale — TODO confirm with callers).</param>
/// <param name="type">Metric selector: 0 = L2 norm of pixel values; 1 = mean of
/// the absolute Sobel response; 2 = mean of the absolute Laplacian response.</param>
/// <returns>Sharpness score; 0 when type is not 0, 1 or 2.</returns>
public static double ImageSharpness(Mat img,int type)
{
double ret = 0;
switch (type)
{
case 0:
{
// NOTE(review): this computes the L2 norm of the image, not the
// gray-level variance the original (Chinese) comment claimed —
// confirm which metric was actually intended.
ret= img.Norm(NormTypes.L2);
break;
}
case 1:
{
// Sobel with xorder=3, yorder=3, ksize=5 — unusually high derivative
// orders; presumably intentional tuning, verify against callers.
Mat soble = img.Sobel(MatType.CV_64FC1, 3, 3, 5);
soble=soble.ConvertScaleAbs();
ret = Cv2.Mean(soble)[0];
break;
}
case 2:
{
Mat lapl = img.Laplacian(MatType.CV_64FC1, 5, 1);
lapl=lapl.ConvertScaleAbs();
ret = Cv2.Mean(lapl)[0];
break;
}
}
return ret;
}
}
}
// Source attribution: adapted from blog.csdn.net/JAYLEE900/article/details/132508265
// ("C# routine image processing"). The CSDN web-page boilerplate that had been
// pasted after the closing brace was removed — it made the file uncompilable.