Ok,

so it is getting late over here, I will stop for today.

I am currently doing this - which is obviously not working, as the protocol header is not correct.

I am not copying the NSString correctly into my sendBuffer apparently - am I missing something obvious here?




- (BOOL)execute:(id <QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary *)arguments
{

if([connectSocket isDisconnected] || [self didValueForInputKeyChange:@"inputPort"] || [self didValueForInputKeyChange:@"inputHost"])
    {
        // stop listening for incomming connections
        [connectSocket disconnect];

if(![connectSocket connectToHost:self.inputIpAddressPixelmaster onPort:self.inputPort error:nil])
        {
            NSLog(@"could not connect to host, port");
            return NO;
        }
    }

    if([connectSocket isDisconnected])
    {
        NSLog(@"Not connected!");
        return NO;
    }


    // get the image to use
    id<QCPlugInInputImageSource> imageToUse = self.inputImage;

CGColorSpaceRef colorSpace = (CGColorSpaceGetModel([imageToUse imageColorSpace]) == kCGColorSpaceModelRGB ? [imageToUse imageColorSpace] : [context colorSpace]);





    ///////////////////////////////////////////////////////
    // Protocol setup from hereon in order to send data
    // for that to happen, we need to build the protocol
    ///////////////////////////////////////////////////////

    ///////////////////////////////////////////////////////
    // DEBUG: fixed data length (BAD! to be fixed)
    ///////////////////////////////////////////////////////
    //int lengthByte = 7235;
NSUInteger lengthByte = self.inputWidthLEDWall * self.inputHeightLEDWall * 3 + 35;

    ///////////////////////////////////////////////////////
    // set up the sendBuffer
    ///////////////////////////////////////////////////////
    //NSMutableData* sendBuffer = [[NSMutableData alloc] init];
    // retain apparently fixed my issue with the crash

    void* sendBuffer = malloc(lengthByte);




    // build the header
    NSString* magic = self.inputProtMagic;
[magic getCString:sendBuffer maxLength:[magic length] encoding:NSASCIIStringEncoding]; //[sendBuffer appendData:[magic dataUsingEncoding:NSASCIIStringEncoding]];



    //databytes per protocol
    Byte lengthBytes[2] = {lengthByte/256, lengthByte % 256};
    //[sendBuffer appendBytes:lengthBytes length:2 ] ;

    memcpy(&sendBuffer[[magic length]], lengthBytes, 2);


    // typeByte
    Byte typeByte = self.inputProtTypeByte;
    //[sendBuffer appendBytes:&typeByte length:1];
    memcpy(&sendBuffer[[magic length]+2],&typeByte, 1);


    // TBA MESSAGE ID (=A COUNTER)
    counter = self.inputProtMessageID;

    Byte messageID[2] = { counter/256, counter % 256};
    //[sendBuffer appendBytes:messageID length:2];
    memcpy(&sendBuffer[[magic length]+4], messageID, 2);


    //NSLog(@"connected %d¸", [connectSocket isConnected]);


    Byte filtersEnable = self.inputProtTypeByte;
    //[sendBuffer appendBytes:&filtersEnable length:1];
    memcpy(sendBuffer, &filtersEnable, 1);

    Byte fixImagesEnable = self.inputProtFixImagesEnable;
    //[sendBuffer appendBytes:&fixImagesEnable length:1];
    memcpy(sendBuffer, &fixImagesEnable, 1);


    // Byte 27 = 50 in the recorded stream, is this the fps  in Hz?

    Byte reservedBytes[5] = {
        self.inputProtReserved27Byte,
        self.inputProtReserved28Byte,
        self.inputProtReserved29Byte,
        self.inputProtReserved30Byte,
        self.inputProtReserved31Byte
        };

    //[sendBuffer appendBytes:reservedBytes length:5];
    memcpy(sendBuffer, reservedBytes, 5);









    // now append image data!
    //[sendBuffer appendBytes:[imgData bytes] length:[imgData length]];



if (![imageToUse lockBufferRepresentationWithPixelFormat:QCPlugInPixelFormatARGB8
colorSpace:colorSpace
forBounds:[imageToUse imageBounds]])
    {

        NSLog(@"Locking of image failed.");
        return NO;

    }

    ///////////////////////////////////////////////////////
    // we got the image locked now, read out pixel data ...
    ///////////////////////////////////////////////////////
    vImage_Buffer           buffer;
    vImage_Buffer           vDestBuffer;



NSBitmapImageRep* newRep = [[[NSBitmapImageRep alloc] initWithBitmapDataPlanes:NULL
     pixelsWide:self.inputWidthLEDWall
     pixelsHigh:self.inputHeightLEDWall
     bitsPerSample:8
     samplesPerPixel:4
     hasAlpha:YES
     isPlanar:YES
     colorSpaceName:NSDeviceRGBColorSpace
     bitmapFormat:0
     bytesPerRow: 0
     bitsPerPixel:32] autorelease];

    ///////////////////////////////////////////////////////
    // Set up the vImage buffer for the source image
    ///////////////////////////////////////////////////////
    buffer.data     = (void*)[imageToUse bufferBaseAddress];
    buffer.rowBytes = [imageToUse bufferBytesPerRow];
    buffer.width    = [imageToUse bufferPixelsWide];
    buffer.height   = [imageToUse bufferPixelsHigh];

    ///////////////////////////////////////////////////////
    // Set up the vImage buffer for the destination
    ///////////////////////////////////////////////////////
    vDestBuffer.data = [newRep bitmapData];
    vDestBuffer.height = [newRep pixelsHigh];
    vDestBuffer.width = [newRep pixelsWide];
    vDestBuffer.rowBytes = [newRep bytesPerRow];

    ///////////////////////////////////////////////////////
    // do the scale
    ///////////////////////////////////////////////////////
    if (vImageScale_ARGB8888(&buffer, &vDestBuffer, NULL, 0))
    {
        [imageToUse unlockBufferRepresentation];
        return NO;
    }

    // unlock the source image
    [imageToUse unlockBufferRepresentation];

    ///////////////////////////////////////////////////////
    // read out pixeldata from the scaled down image
    ///////////////////////////////////////////////////////

//[sendBuffer appendBytes:[newRep bitmapData] length: ([newRep pixelsWide] * [newRep pixelsHigh] * 3 ) ];




    /*

    NSMutableData* imgData = [[NSMutableData alloc] init];
    */

    void *pixelBuffer = malloc(lengthByte-35);
    int c = 0;

    for(int y=0;y<vDestBuffer.height;y++)
    {

        for(int x=0;x<vDestBuffer.width;x++)
        {

            long currentLineOffset = y * vDestBuffer.rowBytes + x*4;
            c++;
//[imgData appendBytes:&vDestBuffer.data[currentLineOffset+1] length:3]; memcpy(&sendBuffer[32+c], &vDestBuffer.data[currentLineOffset+1], 3);

        }

    }



    // now end the buffer
    NSString* endString = @"end";
//[sendBuffer appendData:[endString dataUsingEncoding:NSASCIIStringEncoding]];

    memcpy(sendBuffer, &endString, 3);
    //[sendBuffer appendData:&(@"end")];


    //NSData* data = [NSData dataWithData:sendBuffer];
    //[connectSocket writeData:data  withTimeout:-1 tag:0];

NSData* dataToSend = [NSData dataWithBytes:sendBuffer length:lengthByte];

    [connectSocket writeData:dataToSend  withTimeout:-1 tag:0];

    NSLog(@"wrote %d bytes, message id %d", lengthByte, counter);

    free(pixelBuffer);
    free(sendBuffer);

    return YES;

}



--
Christophe Leske
multimedial.de

----------------------------------------
www.multimedial.de - i...@multimedial.de
Hohler Strasse 17 - 51645 Gummersbach
+49(0)2261-99824540 // +49(0)177-2497031
----------------------------------------

_______________________________________________
Do not post admin requests to the list. They will be ignored.
Quartzcomposer-dev mailing list      (Quartzcomposer-dev@lists.apple.com)
Help/Unsubscribe/Update your Subscription:
https://lists.apple.com/mailman/options/quartzcomposer-dev/archive%40mail-archive.com

This email sent to arch...@mail-archive.com

Reply via email to