|
iirc -- you want SetDIBitsToDevice and not BitBlt. You are using a Device Independent Bitmap and those do not automatically translate to Device Dependent Bitmaps.
|
|
|
|
|
Hi,
I am vb.net Developer and I am new in Directshow and C++.
Issue is:
I need to modify a video file (for example, by hashing or otherwise altering its bytes) so that it is playable only with my own software. If someone copies the file, it should not be playable elsewhere.
so I need to write player for turn back file to original format in playing time,as I know I must do it via direct show and Async File source Filter please Help me and tell me the solution.
Best Regards
|
|
|
|
|
I am trying to use OpenGL to display the result of my rendering engine but I get a blank screen with no display. Below is my main c++ file with OpenGL calls and the file with the rendering engine. Please what am I doing wrongly
#define _CRT_SECURE_NO_WARNINGS
#include <windows.h>
#include <GL/glew.h>
#include <GL/glut.h>
#include "common.h"
#include "raytracer.h"
#include "scene.h"
#include "surface.h"
#include "cilk.h"
#include "cilkview.h"
#include <cstdio>
#include <cassert>
#include <vector>
#define SCRWIDTH 800
#define SCRHEIGHT 600
GLuint texture = 0;               // pixel-unpack buffer object, created in appInit()
Raytracer::Surface* surface = 0;  // render target allocated in start(); owns the pixel storage
Pixel* buffer = 0;                // raw pixel buffer of `surface` (0x00RRGGBB ints)
Raytracer::Engine* tracer = 0;    // the raytracer; writes into `buffer` via SetTarget()
// Initialise the raytracer on the first call, then trace exactly one full
// frame into `buffer` and return.
//
// The original version spun in an endless while(1) here, so control never
// returned to render() and nothing was ever drawn (the reported blank
// screen). It also re-allocated the Surface/Engine and re-opened the log
// file on every call, and flushed stdout instead of the log file.
void start(){
	static FILE *outFile = 0;  // timing log, opened once
	static int tpos = 60;      // x position of the next timing label
	if (!surface)
	{
		// One-time setup (render() calls start() every frame).
		surface = new Raytracer::Surface( SCRWIDTH, SCRHEIGHT );
		buffer = surface->GetBuffer();
		surface->Clear( 0 );
		surface->InitCharset();
		surface->Print( "timings:", 2, 2, 0xffffffff );
		tracer = new Raytracer::Engine();
		tracer->GetScene()->InitScene();
		tracer->SetTarget( surface->GetBuffer(), SCRWIDTH, SCRHEIGHT );
		outFile = fopen("RayTracing.out","a");
		if (!outFile){
			printf("Cannot open output file");
			exit(1);
		}
		fprintf (outFile, "Tracing Depth : %d\n", TRACEDEPTH);
	}
	// Trace one complete frame. Render() works in ~100 ms slices and
	// returns true when the frame is done. (The original called
	// glutSwapBuffers() in this loop, flipping buffers nothing had been
	// drawn into.)
	int fstart = GetTickCount();
	tracer->InitRender();
	while (!tracer->Render()) { /* keep tracing until the frame is finished */ }
	int ftime = GetTickCount() - fstart;
	// Format the elapsed time as "mm:ss.mmm" and stamp it on the surface.
	char t[] = "00:00.000";
	t[6] = (ftime / 100) % 10 + '0';
	t[7] = (ftime / 10) % 10 + '0';
	t[8] = (ftime % 10) + '0';
	int secs = (ftime / 1000) % 60;
	int mins = (ftime / 60000) % 100;
	t[3] = ((secs / 10) % 10) + '0';
	t[4] = (secs % 10) + '0';
	t[1] = (mins % 10) + '0';
	t[0] = ((mins / 10) % 10) + '0';
	float par_time = ftime / 1000.f;
	fprintf (outFile, "%.3f\n", par_time);
	fflush (outFile);  // was fflush(stdout): flushed the wrong stream
	surface->Print( t, tpos, 2, 0xffffffff );
	tpos += 100;
}
void render(void) {
start();
glClear(GL_COLOR_BUFFER_BIT);
glRasterPos2i(0, 0);
glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, texture);
glDrawPixels(SCRWIDTH, SCRHEIGHT, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
glutSwapBuffers();
glutPostRedisplay();
}
void reshape(int x, int y) {
glViewport(0, 0, x, y);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);
}
// Create a pixel-unpack buffer object sized for a w x h RGBA8 frame.
// NOTE(review): nothing in this file ever copies pixel data into this
// buffer, so drawing from it via glDrawPixels shows nothing -- confirm
// whether an upload step (glBufferSubDataARB / glMapBufferARB) was intended.
void appInit(int w, int h) {
// `texture` is a buffer object name despite its name.
glGenBuffersARB(1, &texture);
glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, texture);
// Allocate storage only; the data pointer is NULL.
glBufferDataARB(GL_PIXEL_UNPACK_BUFFER_ARB, w * h * sizeof(GLubyte) * 4, 0, GL_STREAM_DRAW_ARB);
glBindBufferARB(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
}
// GLUT idle callback: keep requesting redraws so rendering makes progress.
void idle() { glutPostRedisplay(); }
// atexit() handler: release the pixel-unpack buffer created in appInit().
// NOTE(review): this runs after glutMainLoop() exits and the GL context may
// already be gone, so the delete call may be a no-op -- confirm on the
// target GLUT implementation.
void cleanup(){
glDeleteBuffersARB(1, &texture);
}
int main(int argc, char **argv) {
atexit(cleanup);
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB);
glutInitWindowPosition(100, 100);
glutInitWindowSize(SCRWIDTH, SCRHEIGHT);
glutCreateWindow("Cilk Plus raytracer");
glewInit();
appInit(SCRWIDTH, SCRHEIGHT);
glutDisplayFunc(render);
glutIdleFunc(idle);
glutReshapeFunc(reshape);
glutMainLoop();
return 0;
}
raytrace.cpp
#include "raytracer.h"
#include "scene.h"
#include "common.h"
#include "windows.h"
#include "winbase.h"
#include "cilk.h"
namespace Raytracer {
// Construct a ray from an origin point and a direction.
// NOTE(review): callers appear to always pass a normalised direction --
// confirm; nothing here enforces it. The non-const reference parameters
// forbid passing temporaries, but the signature is fixed by raytracer.h
// and is left untouched.
Ray::Ray( vector3& a_Origin, vector3& a_Dir ) :
m_Origin( a_Origin ),
m_Direction( a_Dir )
{
}
// Create the engine and the scene it renders. The scene is owned by this
// engine and is released in ~Engine().
Engine::Engine()
{
m_Scene = new Scene();
}
// Destroy the owned scene.
// NOTE(review): other resources allocated by this class (m_LastRow, the
// *DataStructCpp members) are not released here -- verify ownership.
Engine::~Engine()
{
delete m_Scene;
}
// Point the tracer at an externally owned pixel buffer of the given
// dimensions. The engine writes pixels into a_Dest but never frees it.
void Engine::SetTarget( Pixel* a_Dest, int a_Width, int a_Height )
{
	m_Width = a_Width;
	m_Height = a_Height;
	m_Dest = a_Dest;
}
// Trace a_Ray through the scene, accumulating shading into a_Acc and
// reporting the nearest hit distance in a_Dist. Returns the primitive that
// was hit, or 0 if the ray missed everything or the depth limit was reached.
// (The original kept the Intersect() return code in unused variables and
// re-tested `prim` after an early return; that dead code is removed.)
Primitive* Engine::Raytrace( Ray& a_Ray, Color& a_Acc, int a_Depth, float a_RIndex, float& a_Dist )
{
	if (a_Depth > TRACEDEPTH) return 0;
	a_Dist = 1000000.0f;   // "infinity"; Intersect() shrinks it on each closer hit
	vector3 pi;            // intersection point
	Primitive* prim = 0;   // nearest primitive hit so far
	// Find the nearest intersection along the ray.
	for ( int s = 0; s < m_Scene->GetNrPrimitives(); s++ )
	{
		Primitive* pr = m_Scene->GetPrimitive( s );
		if (pr->Intersect( a_Ray, a_Dist )) prim = pr;
	}
	if (!prim) return 0;
	// Hitting a light source directly: plain white, no further shading.
	if (prim->IsLight())
	{
		a_Acc = Color( 1.0f, 1.0f, 1.0f );
		return prim;
	}
	pi = a_Ray.GetOrigin() + a_Ray.GetDirection() * a_Dist;
	// Direct lighting: accumulate the contribution of every light.
	for ( int l = 0; l < m_Scene->GetNrPrimitives(); l++ )
	{
		Primitive* p = m_Scene->GetPrimitive( l );
		if (!p->IsLight()) continue;
		Primitive* light = p;
		float shade = 1.0f;
		// Shadow test (only performed for sphere lights).
		if (light->GetType() == Primitive::SPHERE)
		{
			vector3 L = ((Sphere*)light)->GetCentre() - pi;
			float tdist = LENGTH( L );
			NORMALIZE(L);
			vector3 TempVector3(pi + L * EPSILON);   // nudge off the surface to avoid self-hits
			Ray r = Ray( TempVector3, L );
			for ( int s = 0; s < m_Scene->GetNrPrimitives(); s++ )
			{
				Primitive* pr = m_Scene->GetPrimitive( s );
				if ((pr != light) && (pr->Intersect( r, tdist )))
				{
					shade = 0;
					break;
				}
			}
		}
		if (shade > 0)
		{
			// NOTE(review): the cast below assumes every light is a Sphere,
			// matching the shadow test above -- confirm the scene never
			// contains non-sphere lights.
			vector3 L = ((Sphere*)light)->GetCentre() - pi;
			NORMALIZE( L );
			vector3 N = prim->GetNormal( pi );
			// Lambertian diffuse term.
			if (prim->GetMaterial()->GetDiffuse() > 0)
			{
				float dot = DOT( L, N );
				if (dot > 0)
				{
					float diff = dot * prim->GetMaterial()->GetDiffuse() * shade;
					Color ncol = diff * prim->GetMaterial()->GetColor();
					// Rescale (rather than clip) overflowing channels so the
					// hue is preserved.
					if (ncol.r > 1.0f || ncol.g > 1.0f || ncol.b > 1.0f)
					{
						float max = 1.0f;
						if (ncol.r > max) max = ncol.r;
						if (ncol.g > max) max = ncol.g;
						if (ncol.b > max) max = ncol.b;
						ncol *= 1.0f/max;
					}
					a_Acc += ncol;
				}
			}
			// Phong specular term (hard-coded exponent of 20).
			if (prim->GetMaterial()->GetSpecular() > 0)
			{
				vector3 V = a_Ray.GetDirection();
				vector3 R = L - 2.0f * DOT( L, N ) * N;
				float dot = DOT( V, R );
				if (dot > 0)
				{
					float spec = powf( dot, 20 ) * prim->GetMaterial()->GetSpecular() * shade;
					Color ncol = spec * light->GetMaterial()->GetColor();
					// Clamp each channel to [0,1].
					if (ncol.r > 1.0f) ncol.r = 1.0f; else if (ncol.r < 0.0f) ncol.r = 0.0f;
					if (ncol.g > 1.0f) ncol.g = 1.0f; else if (ncol.g < 0.0f) ncol.g = 0.0f;
					if (ncol.b > 1.0f) ncol.b = 1.0f; else if (ncol.b < 0.0f) ncol.b = 0.0f;
					a_Acc += ncol;
				}
			}
		}
	}
	// Rescale the accumulated colour if any channel overflowed.
	if (a_Acc.r > 1.0f || a_Acc.g > 1.0f || a_Acc.b > 1.0f)
	{
		float max = 1.0f;
		if (a_Acc.r > max) max = a_Acc.r;
		if (a_Acc.g > max) max = a_Acc.g;
		if (a_Acc.b > max) max = a_Acc.b;
		a_Acc *= 1.0f/max;
	}
	// Perfect mirror reflection, traced recursively up to TRACEDEPTH.
	float refl = prim->GetMaterial()->GetReflection();
	if ((refl > 0.0f) && (a_Depth < TRACEDEPTH))
	{
		vector3 N = prim->GetNormal( pi );
		vector3 R = a_Ray.GetDirection() - 2.0f * DOT( a_Ray.GetDirection(), N ) * N;
		Color rcol( 0.0f, 0.0f, 0.0f );
		float dist;
		vector3 TempVector3(pi + R * EPSILON);
		Ray TempRay(TempVector3, R);
		Raytrace( TempRay, rcol, a_Depth + 1, a_RIndex, dist );
		a_Acc += refl * rcol;
	}
	return prim;
}
void Engine::InitRender()
{
m_CurrLine = 20;
m_PPos = 20 * m_Width;
m_WX1 = -4, m_WX2 = 4, m_WY1 = m_SY = 3, m_WY2 = -3;
m_DX = (m_WX2 - m_WX1) / m_Width;
m_DY = (m_WY2 - m_WY1) / m_Height;
m_SY += 20 * m_DY;
m_LastRow = new Primitive*[m_Width];
memset( m_LastRow, 0, m_Width * 4 );
m_RayMissesCount = 0;
m_RayIntersectionsCount = 0;
}
void Engine::PreRender() {
m_spheres.clear();
m_planes.clear();
Sphere* tempSphere;
PlanePrim* tempPlane;
int sphereCount;
int planesCount;
Primitive* prim;
for (int i = 0; i < m_Scene->GetNrPrimitives(); i++) {
prim = m_Scene->GetPrimitive(i);
tempSphere = dynamic_cast<Sphere*> (prim);
if (dynamic_cast<Sphere*> (prim) != 0){
m_spheres.push_back(tempSphere);
}
tempPlane = dynamic_cast<PlanePrim*> (prim);
if (dynamic_cast<PlanePrim*> (prim) != 0)
m_planes.push_back(tempPlane);
}
m_sphereDataStructCpp = new SphereDataStructCpp(m_spheres.size());
for (int i = 0; i < m_spheres.size(); i++) {
tempSphere = m_spheres[i];
m_sphereDataStructCpp->centerX[i] = tempSphere->GetCentre().x;
m_sphereDataStructCpp->centerY[i] = tempSphere->GetCentre().y;
m_sphereDataStructCpp->centerZ[i] = tempSphere->GetCentre().z;
m_sphereDataStructCpp->recRadius[i] = tempSphere->GetRecRadius();
m_sphereDataStructCpp->sqRadius[i] = tempSphere->GetSqRadius();
m_sphereDataStructCpp->diffuse[i] = tempSphere->GetMaterial()->GetDiffuse();
m_sphereDataStructCpp->specular[i] = tempSphere->GetMaterial()->GetSpecular();
m_sphereDataStructCpp->reflection[i] = tempSphere->GetMaterial()->GetReflection();
m_sphereDataStructCpp->refraction[i] = tempSphere->GetMaterial()->GetRefraction();
m_sphereDataStructCpp->refrIndex[i] = tempSphere->GetMaterial()->GetRefrIndex();
m_sphereDataStructCpp->red[i] = tempSphere->GetMaterial()->GetColor().r;
m_sphereDataStructCpp->green[i] = tempSphere->GetMaterial()->GetColor().g;
m_sphereDataStructCpp->blue[i] = tempSphere->GetMaterial()->GetColor().b;
m_sphereDataStructCpp->isLight[i] = tempSphere->IsLight();
}
m_planeDataStructCpp = new PlaneDataStructCpp(m_planes.size());
for (int i = 0; i < m_planes.size(); i++) {
tempPlane = m_planes[i];
m_planeDataStructCpp->normalX[i] = tempPlane->GetNormal().x;
m_planeDataStructCpp->normalY[i] = tempPlane->GetNormal().y;
m_planeDataStructCpp->normalZ[i] = tempPlane->GetNormal().z;
m_planeDataStructCpp->d[i] = tempPlane->GetD();
m_planeDataStructCpp->diffuse[i] = tempPlane->GetMaterial()->GetDiffuse();
m_planeDataStructCpp->specular[i] = tempPlane->GetMaterial()->GetSpecular();
m_planeDataStructCpp->reflection[i] = tempPlane->GetMaterial()->GetReflection();
m_planeDataStructCpp->refraction[i] = tempPlane->GetMaterial()->GetRefraction();
m_planeDataStructCpp->refrIndex[i] = tempPlane->GetMaterial()->GetRefrIndex();
m_planeDataStructCpp->red[i] = tempPlane->GetMaterial()->GetColor().r;
m_planeDataStructCpp->green[i] = tempPlane->GetMaterial()->GetColor().g;
m_planeDataStructCpp->blue[i] = tempPlane->GetMaterial()->GetColor().b;
m_planeDataStructCpp->isLight[i] = tempPlane->IsLight();
}
m_RayMissesCount = 0;
m_RayIntersectionsCount = 0;
m_statisticsDataStructCpp = new StatisticsDataStructCpp((size_t)m_CohortPSize);
}
// Render as many scanlines as fit into a ~100 ms time slice, resuming from
// m_CurrLine on the next call. Returns true once the frame (minus the
// 20-pixel top/bottom text bands) is complete, false if interrupted.
bool Engine::Render()
{
// Fixed camera position on the -z axis, looking toward the screen plane.
vector3 o( 0, 0, -5 );
int msecs = GetTickCount();
// NOTE(review): never written or read below -- appears to be dead.
Primitive* lastprim = 0;
for ( int y = m_CurrLine; y < (m_Height - 20); y++ )
{
// Restart at the left edge of the screen plane for each scanline.
m_SX = m_WX1;
for ( int x = 0; x < m_Width; x++ )
{
Color acc( 0, 0, 0 );
// Primary ray from the camera through the current screen-plane point.
vector3 dir = vector3( m_SX, m_SY, 0 ) - o;
NORMALIZE( dir );
Ray r( o, dir );
float dist;
Primitive* prim = Raytrace( r, acc, 1, 1.0f, dist );
// Convert [0,1] floats to 8-bit channels (clamped at 255) and pack
// them as a 0x00RRGGBB pixel.
int red = (int)(acc.r * 256);
int green = (int)(acc.g * 256);
int blue = (int)(acc.b * 256);
if (red > 255) red = 255;
if (green > 255) green = 255;
if (blue > 255) blue = 255;
m_Dest[m_PPos++] = (red << 16) + (green << 8) + blue;
m_SX += m_DX;
}
m_SY += m_DY;
// Yield after ~100 ms so the caller can update the display; rendering
// resumes from the next scanline on the following call.
if ((GetTickCount() - msecs) > 100)
{
m_CurrLine = y + 1;
return false;
}
}
return true;
}
};
|
|
|
|
|
I understand the need to convert RGB to gray scale for further processing.
If I am after edge detection should I convert all colors equally - 1/3 of value or should I adjust the colors so it looks natural?
Thanks for reading
Cheers
Vaclav
|
|
|
|
|
The recommended way is to convert RGB to YUV then use the Y channel for gray scale.
Y = 0.299 * R + 0.587 * G + 0.114 * B
Cheers,
Peter
Software rusts. Simon Stephenson, ca 1994. So does this signature. me, 2012
|
|
|
|
|
I have a working DirectShow graph using "still" pin on video source filter.
I have set both hardware and software triggering. So far using only the hardware trigger.
It works but I have to push the camera button twice to get the snap shot.
It takes only one push in the control panel wizard to snap a picture.
Any suggestion what is missing / wrong in my code?
I run both preview and capture via smart T filter - so I got three video rendered when it all runs.
Cheers Vaclav
|
|
|
|
|
Hello
I would like to display non-English text on GLUI controls such as buttons and listboxes.
in the run, the controls show empty without any text.
while the program work well if I write English text on button
|
|
|
|
|
GLUI? Never heard of that. Google showed me http://glui.sourceforge.net/[^] - do you think of that?
Well, then look at that number: "Status July 2006" - 8 Years old. Did they know what is Unicode? And that's what you need: support for Unicode.
|
|
|
|
|
Hello friends, how is the day going? Please, I need someone to refer me to a link that I can successfully download direct x8. The one I found is always showing error in downloading. Thanks
|
|
|
|
|
Really DirectX 8?
As far as I can remember this is for Win 98/ME.
I found a Microsoft link for non-english versions of 8.1: http://www.microsoft.com/de-de/download/details.aspx?id=10830[^].
You may also check the media of old games. They often provide DirectX installers.
I would not download executables from untrusted sites. The risk of compromising your system is very high.
|
|
|
|
|
Hi,
I am using MFC. I want to draw a color filled object (for example, circle) to show on the top of everything else?
Best,
|
|
|
|
|
First of all, I'm sorry if this isn't the right forum to post this kind of question.
Does someone know if there's still a community for SharpDX after the official forum closed? a place where I could ask specific questions and look at what other people are doing with SharpDX.
I know that xoofx had a good reason to close the forum, but I kind of feel lost trying to understand the differences between the DirectX and the SharpDX api's.
|
|
|
|
|
They mentioned few places for your questions(in the below post), check it out
SharpDX - Closing the forum [^]
thatrajaCode converters | Education Needed
No thanks, I am all stocked up. - Luc Pattyn
When you're wrestling a gorilla, you don't stop when you're tired, you stop when the gorilla is - Henry Minute
|
|
|
|
|
Thanks a lot, I saw that post, and it makes me sad.
The main problems I have with SharpDX are not related to DirectX, but rather with all the moved cheese on the API.
It's hard to guess if a CreateXXX function on the DirectX API has become:
- A static Create or New method on the XXX type.
- A constructor on the XXX type.
- A static Create method on a factory type.
- A instance method on a factory type.
- A instance method on another type that is related in some way to XXX.
To make it even harder, some types offer more than one or two of the above options, while others seem to offer none. That's where I wanted help from the community.
And on top of it all, the toolkit isn't even supported, so making it work is just guesswork and the hope that something relates to XNA in some way.
That makes me sad, because i do like SharpDX and love all of the conveniences that were added, like the conversion operators.
|
|
|
|
|
Hi
I am trying to separate boundaries in an video or image
Eg putting green lines which can separate the boundaries of the image so that objects in the image can be differentiated
Does anyone know what this process is called
I saw it done in videos before am is trying to program it myself
Please help
|
|
|
|
|
I'm loading a image into the graphics of a form, but the quality is very poor. How to improve? The original image is 48*48 (96dpi). Here's my code:
public partial class Form1 : Form
{
    private ImageList ObjImageList;
    private int OPERSTEP_SIZE = 48;

    public Form1()
    {
        InitializeComponent();
        ObjImageList = new ImageList();
        // ImageList resamples every image it stores to its ImageSize, which
        // defaults to 16x16 at 8-bit colour depth -- that resampling is what
        // made the drawn image look so poor. Match the 48x48 source instead.
        ObjImageList.ImageSize = new Size(OPERSTEP_SIZE, OPERSTEP_SIZE);
        ObjImageList.ColorDepth = ColorDepth.Depth32Bit;
        ObjImageList.Images.Add("prodstep", Image.FromFile("ProdStep_BW.gif"));
        this.Paint += new PaintEventHandler(Form1_Paint);
    }

    void Form1_Paint(object sender, PaintEventArgs e)
    {
        Graphics g = e.Graphics;
        // Ask GDI+ for high-quality resampling in case the destination
        // rectangle ever differs from the image's native size.
        g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
        g.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.HighQuality;
        g.DrawImage(ObjImageList.Images["prodstep"], new Rectangle(20, 20, OPERSTEP_SIZE, OPERSTEP_SIZE));
    }
}
|
|
|
|
|
The easiest way would probably be to use an image with a higher resolution than 48*48 but still display it as 48*48 - Are you sure the displayed image on the form is still 48*48?
cheers,
Marco
|
|
|
|
|
|
Thanks for the information, i will check it out!
|
|
|
|
|
you can use directx or gdi +
tell me more about your question to help u .
|
|
|
|
|
install graphics drivers on ur pc
Raisal
|
|
|
|
|
I did not find in the OP's question that he / she was having general problems with the quality of his / her display. The OP made it very clear that when he / she loads an image with a 48x48 dimension that the image he / she loaded was of poor quality.
It is important that you understand the question before you post an absurd reply like you have just did. You should be ashamed of yourself.
|
|
|
|
|
An image with a dimension of 48x48 literally means that you have an image consisting of 48 * 48 pixels. If you load that image into a control with an Image property, and the said control's dimension are bigger than that of the image itself, the image will be stretched (zoomed) to fit the dimension of the control.
However, in a Picturebox you can control how an image is displayed - zoomed, stretched, centered, etc. As mentioned before, try and use a higher resolution image. Higher resolution means a higher number of pixels. That will give you an image file with a greater dimension. The control you are using the display the image (other than a picturebox) will fit the image according to its own dimensions and not that of the image.
Make sure that the control's dimensions are relative to that of the image to prevent the image from being distorted.
|
|
|
|
|
I want to know whether OPCODE can give me information beyond triangles. A mesh in OPCODE consists of triangles, but most of my models are constructed from polygons, so before my collision query I have to convert the polygons to triangles. I would like to know whether I can determine which polygon a detected triangle belongs to. If the answer is yes, how can I get that information?
I had got the resource from the OPCODE net address, and i also have read the mannual about the OPCODE, but as I wanted to learn some details about the OPCODE, I found it was difficult. And i had got the two projects "opcode" and "CDTestFramework" from that address. because i had not found the information about how to get which polygon the detected triangle is belong to in the user mannual. i hoped to find my anwser in the two projects above, but I find it was difficult to read the code in the two projects. if you had some experience about using OPCODE, i really wished you could give some suggestion about how to read the two projects. And as the most important one of my requests is to get the polygons, so if you know, can you tell me is there any manner i can get the polygons. If the answer is NO, i think I have to think about take us of other detection manners.
|
|
|
|
|
|