diff --git a/GPUImage/GPUImageMac.xcodeproj/project.pbxproj b/GPUImage/GPUImageMac.xcodeproj/project.pbxproj
new file mode 100644
index 0000000..7d65820
--- /dev/null
+++ b/GPUImage/GPUImageMac.xcodeproj/project.pbxproj
@@ -0,0 +1,1547 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ A87E5E12177648F3007FD5B1 /* GPUImageRawDataOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = A87E5E10177648F3007FD5B1 /* GPUImageRawDataOutput.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ A87E5E13177648F3007FD5B1 /* GPUImageRawDataOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = A87E5E11177648F3007FD5B1 /* GPUImageRawDataOutput.m */; };
+ A87E5E1617764B16007FD5B1 /* GPUImageRawDataInput.h in Headers */ = {isa = PBXBuildFile; fileRef = A87E5E1417764B16007FD5B1 /* GPUImageRawDataInput.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ A87E5E1717764B16007FD5B1 /* GPUImageRawDataInput.m in Sources */ = {isa = PBXBuildFile; fileRef = A87E5E1517764B16007FD5B1 /* GPUImageRawDataInput.m */; };
+ BC56957A174683620081491B /* GPUImageCrosshairGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = BC569576174683620081491B /* GPUImageCrosshairGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC56957B174683620081491B /* GPUImageCrosshairGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = BC569577174683620081491B /* GPUImageCrosshairGenerator.m */; };
+ BC56957C174683620081491B /* GPUImageLineGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = BC569578174683620081491B /* GPUImageLineGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC56957D174683620081491B /* GPUImageLineGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = BC569579174683620081491B /* GPUImageLineGenerator.m */; };
+ BC6C553C1730636600EB222D /* GPUImageLaplacianFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC6C553A1730636600EB222D /* GPUImageLaplacianFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC6C553D1730636600EB222D /* GPUImageLaplacianFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC6C553B1730636600EB222D /* GPUImageLaplacianFilter.m */; };
+ BC6C55461730BDCF00EB222D /* GPUImageLanczosResamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC6C55441730BDCF00EB222D /* GPUImageLanczosResamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC6C55471730BDCF00EB222D /* GPUImageLanczosResamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC6C55451730BDCF00EB222D /* GPUImageLanczosResamplingFilter.m */; };
+ BC78B70E172DCCB800342C6A /* GPUImageAmatorkaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC78B706172DCCB800342C6A /* GPUImageAmatorkaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC78B70F172DCCB800342C6A /* GPUImageAmatorkaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC78B707172DCCB800342C6A /* GPUImageAmatorkaFilter.m */; };
+ BC78B710172DCCB800342C6A /* GPUImageLookupFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC78B708172DCCB800342C6A /* GPUImageLookupFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC78B711172DCCB800342C6A /* GPUImageLookupFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC78B709172DCCB800342C6A /* GPUImageLookupFilter.m */; };
+ BC78B712172DCCB800342C6A /* GPUImageMissEtikateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC78B70A172DCCB800342C6A /* GPUImageMissEtikateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
+ BC78B713172DCCB800342C6A /* 
GPUImageMissEtikateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC78B70B172DCCB800342C6A /* GPUImageMissEtikateFilter.m */; }; + BC78B714172DCCB800342C6A /* GPUImageSoftEleganceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC78B70C172DCCB800342C6A /* GPUImageSoftEleganceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BC78B715172DCCB800342C6A /* GPUImageSoftEleganceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC78B70D172DCCB800342C6A /* GPUImageSoftEleganceFilter.m */; }; + BC8A583218124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC8A583018124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BC8A583318124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC8A583118124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.m */; }; + BC96A4F1176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC96A4EF176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BC96A4F2176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC96A4F0176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.m */; }; + BCA20B571746C9EC0097C84A /* GPUImageBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B4F1746C9EC0097C84A /* GPUImageBuffer.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B581746C9EC0097C84A /* GPUImageBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B501746C9EC0097C84A /* GPUImageBuffer.m */; }; + BCA20B591746C9EC0097C84A /* GPUImageHighPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B511746C9EC0097C84A /* GPUImageHighPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B5A1746C9EC0097C84A /* GPUImageHighPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B521746C9EC0097C84A /* GPUImageHighPassFilter.m */; }; + BCA20B5B1746C9EC0097C84A /* GPUImageLowPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B531746C9EC0097C84A /* GPUImageLowPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B5C1746C9EC0097C84A /* GPUImageLowPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B541746C9EC0097C84A /* GPUImageLowPassFilter.m */; }; + BCA20B5D1746C9EC0097C84A /* GPUImageMotionDetector.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B551746C9EC0097C84A /* GPUImageMotionDetector.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B5E1746C9EC0097C84A /* GPUImageMotionDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B561746C9EC0097C84A /* GPUImageMotionDetector.m */; }; + BCA20B611746CFE60097C84A /* GPUImageThresholdSketchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B5F1746CFE60097C84A /* GPUImageThresholdSketchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B621746CFE60097C84A /* GPUImageThresholdSketchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B601746CFE60097C84A /* GPUImageThresholdSketchFilter.m */; }; + BCA20B651746D3C30097C84A /* GPUImageSmoothToonFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B631746D3C30097C84A /* GPUImageSmoothToonFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B661746D3C30097C84A /* GPUImageSmoothToonFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B641746D3C30097C84A /* GPUImageSmoothToonFilter.m */; 
}; + BCA20B691746D3DD0097C84A /* GPUImageTiltShiftFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B671746D3DC0097C84A /* GPUImageTiltShiftFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B6A1746D3DD0097C84A /* GPUImageTiltShiftFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B681746D3DC0097C84A /* GPUImageTiltShiftFilter.m */; }; + BCA20B6D1746D44B0097C84A /* GPUImageCGAColorspaceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B6B1746D44A0097C84A /* GPUImageCGAColorspaceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B6E1746D44B0097C84A /* GPUImageCGAColorspaceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B6C1746D44A0097C84A /* GPUImageCGAColorspaceFilter.m */; }; + BCA20B711746D4AC0097C84A /* GPUImagePosterizeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B6F1746D4AC0097C84A /* GPUImagePosterizeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B721746D4AC0097C84A /* GPUImagePosterizeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B701746D4AC0097C84A /* GPUImagePosterizeFilter.m */; }; + BCA20B751746DE850097C84A /* GPUImageKuwaharaRadius3Filter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B731746DE850097C84A /* GPUImageKuwaharaRadius3Filter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B761746DE850097C84A /* GPUImageKuwaharaRadius3Filter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B741746DE850097C84A /* GPUImageKuwaharaRadius3Filter.m */; }; + BCA20B791746DEDE0097C84A /* GPUImageChromaKeyFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B771746DEDD0097C84A /* GPUImageChromaKeyFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B7A1746DEDE0097C84A /* GPUImageChromaKeyFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B781746DEDE0097C84A /* GPUImageChromaKeyFilter.m */; }; + BCA20B7D1746DF210097C84A /* GPUImageVignetteFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B7B1746DF200097C84A /* GPUImageVignetteFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B7E1746DF210097C84A /* GPUImageVignetteFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B7C1746DF200097C84A /* GPUImageVignetteFilter.m */; }; + BCA20B991747081D0097C84A /* GPUImageBulgeDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B7F1747080E0097C84A /* GPUImageBulgeDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B9A1747081D0097C84A /* GPUImageBulgeDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B801747080F0097C84A /* GPUImageBulgeDistortionFilter.m */; }; + BCA20B9B1747081D0097C84A /* GPUImageClosingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B811747080F0097C84A /* GPUImageClosingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B9C1747081D0097C84A /* GPUImageClosingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B82174708100097C84A /* GPUImageClosingFilter.m */; }; + BCA20B9D1747081D0097C84A /* GPUImageDilationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B83174708110097C84A /* GPUImageDilationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20B9E1747081D0097C84A /* GPUImageDilationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B84174708120097C84A /* GPUImageDilationFilter.m */; }; + BCA20B9F1747081D0097C84A /* GPUImageErosionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B85174708120097C84A /* GPUImageErosionFilter.h */; settings = {ATTRIBUTES = (Public, 
); }; }; + BCA20BA01747081D0097C84A /* GPUImageErosionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B86174708130097C84A /* GPUImageErosionFilter.m */; }; + BCA20BA11747081D0097C84A /* GPUImageGlassSphereFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B87174708140097C84A /* GPUImageGlassSphereFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BA21747081D0097C84A /* GPUImageGlassSphereFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B88174708150097C84A /* GPUImageGlassSphereFilter.m */; }; + BCA20BA31747081D0097C84A /* GPUImagePinchDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B89174708150097C84A /* GPUImagePinchDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BA41747081D0097C84A /* GPUImagePinchDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B8A174708150097C84A /* GPUImagePinchDistortionFilter.m */; }; + BCA20BA51747081D0097C84A /* GPUImageRGBClosingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B8B174708160097C84A /* GPUImageRGBClosingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BA61747081D0097C84A /* GPUImageRGBClosingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B8C174708160097C84A /* GPUImageRGBClosingFilter.m */; }; + BCA20BA71747081D0097C84A /* GPUImageRGBDilationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B8D174708190097C84A /* GPUImageRGBDilationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BA81747081D0097C84A /* GPUImageRGBDilationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B8E174708190097C84A /* GPUImageRGBDilationFilter.m */; }; + BCA20BA91747081D0097C84A /* GPUImageRGBErosionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B8F174708190097C84A /* GPUImageRGBErosionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BAA1747081D0097C84A /* GPUImageRGBErosionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B901747081A0097C84A /* GPUImageRGBErosionFilter.m */; }; + BCA20BAB1747081D0097C84A /* GPUImageRGBOpeningFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B911747081A0097C84A /* GPUImageRGBOpeningFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BAC1747081D0097C84A /* GPUImageRGBOpeningFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B921747081B0097C84A /* GPUImageRGBOpeningFilter.m */; }; + BCA20BAD1747081D0097C84A /* GPUImageSphereRefractionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B931747081B0097C84A /* GPUImageSphereRefractionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BAE1747081D0097C84A /* GPUImageSphereRefractionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B941747081B0097C84A /* GPUImageSphereRefractionFilter.m */; }; + BCA20BAF1747081D0097C84A /* GPUImageStretchDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B951747081C0097C84A /* GPUImageStretchDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BB01747081D0097C84A /* GPUImageStretchDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B961747081C0097C84A /* GPUImageStretchDistortionFilter.m */; }; + BCA20BB11747081D0097C84A /* GPUImageSwirlFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20B971747081C0097C84A /* GPUImageSwirlFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BB21747081D0097C84A /* GPUImageSwirlFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20B981747081D0097C84A /* 
GPUImageSwirlFilter.m */; }; + BCA20BB5174708630097C84A /* GPUImageOpeningFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BB3174708610097C84A /* GPUImageOpeningFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BB6174708630097C84A /* GPUImageOpeningFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BB4174708620097C84A /* GPUImageOpeningFilter.m */; }; + BCA20BC117471C4B0097C84A /* GPUImageJFAVoronoiFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BB717471C3A0097C84A /* GPUImageJFAVoronoiFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BC217471C4B0097C84A /* GPUImageJFAVoronoiFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BB817471C3C0097C84A /* GPUImageJFAVoronoiFilter.m */; }; + BCA20BC317471C4B0097C84A /* GPUImageLocalBinaryPatternFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BB917471C3D0097C84A /* GPUImageLocalBinaryPatternFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BC417471C4B0097C84A /* GPUImageLocalBinaryPatternFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BBA17471C3F0097C84A /* GPUImageLocalBinaryPatternFilter.m */; }; + BCA20BC517471C4B0097C84A /* GPUImageMosaicFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BBB17471C400097C84A /* GPUImageMosaicFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BC617471C4B0097C84A /* GPUImageMosaicFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BBC17471C420097C84A /* GPUImageMosaicFilter.m */; }; + BCA20BC717471C4B0097C84A /* GPUImagePerlinNoiseFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BBD17471C440097C84A /* GPUImagePerlinNoiseFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BC817471C4B0097C84A /* GPUImagePerlinNoiseFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BBE17471C460097C84A /* GPUImagePerlinNoiseFilter.m */; }; + BCA20BC917471C4B0097C84A /* GPUImageVoronoiConsumerFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA20BBF17471C480097C84A /* GPUImageVoronoiConsumerFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCA20BCA17471C4B0097C84A /* GPUImageVoronoiConsumerFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCA20BC017471C490097C84A /* GPUImageVoronoiConsumerFilter.m */; }; + BCA3F32017239B6500E28AEC /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCA3F31F17239B6500E28AEC /* Cocoa.framework */; }; + BCA3F3621723A8FA00E28AEC /* OpenGL.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCA3F3601723A8FA00E28AEC /* OpenGL.framework */; }; + BCA3F3631723A8FA00E28AEC /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCA3F3611723A8FA00E28AEC /* QuartzCore.framework */; }; + BCA3F3651723A96600E28AEC /* GPUImage.h in Headers */ = {isa = PBXBuildFile; fileRef = BCA3F3641723A96600E28AEC /* GPUImage.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCAE41E41731C3000020F80D /* GPUImageThreeInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCAE41E21731C3000020F80D /* GPUImageThreeInputFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCAE41E51731C3000020F80D /* GPUImageThreeInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCAE41E31731C3000020F80D /* GPUImageThreeInputFilter.m */; }; + BCB009E6172A1BBD00DB804C /* GPUImageGammaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009E4172A1BBD00DB804C /* GPUImageGammaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009E7172A1BBD00DB804C /* GPUImageGammaFilter.m in Sources */ = 
{isa = PBXBuildFile; fileRef = BCB009E5172A1BBD00DB804C /* GPUImageGammaFilter.m */; }; + BCB009EA172A1BE800DB804C /* GPUImageToneCurveFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009E8172A1BE800DB804C /* GPUImageToneCurveFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009EB172A1BE800DB804C /* GPUImageToneCurveFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB009E9172A1BE800DB804C /* GPUImageToneCurveFilter.m */; }; + BCB009F0172A23AA00DB804C /* GPUImageHazeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009EC172A23A900DB804C /* GPUImageHazeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009F1172A23AA00DB804C /* GPUImageHazeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB009ED172A23A900DB804C /* GPUImageHazeFilter.m */; }; + BCB009F2172A23AA00DB804C /* GPUImageHighlightShadowFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009EE172A23AA00DB804C /* GPUImageHighlightShadowFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009F3172A23AA00DB804C /* GPUImageHighlightShadowFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB009EF172A23AA00DB804C /* GPUImageHighlightShadowFilter.m */; }; + BCB009F6172A243400DB804C /* GPUImageSepiaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009F4172A243300DB804C /* GPUImageSepiaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009F7172A243400DB804C /* GPUImageSepiaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB009F5172A243400DB804C /* GPUImageSepiaFilter.m */; }; + BCB009FA172A246E00DB804C /* GPUImageColorMatrixFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB009F8172A246D00DB804C /* GPUImageColorMatrixFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCB009FB172A246E00DB804C /* GPUImageColorMatrixFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB009F9172A246E00DB804C /* GPUImageColorMatrixFilter.m */; }; + BCBF6931173061D200E5792A /* GPUImage3x3ConvolutionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCBF692F173061D200E5792A /* GPUImage3x3ConvolutionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCBF6932173061D200E5792A /* GPUImage3x3ConvolutionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCBF6930173061D200E5792A /* GPUImage3x3ConvolutionFilter.m */; }; + BCBF69351730623200E5792A /* GPUImageEmbossFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCBF69331730623200E5792A /* GPUImageEmbossFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCBF69361730623200E5792A /* GPUImageEmbossFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCBF69341730623200E5792A /* GPUImageEmbossFilter.m */; }; + BCC0DF4217358315007C485F /* GPUImageColorInvertFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF4017358315007C485F /* GPUImageColorInvertFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF4317358315007C485F /* GPUImageColorInvertFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF4117358315007C485F /* GPUImageColorInvertFilter.m */; }; + BCC0DF4817359529007C485F /* GPUImageHistogramFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF4417359529007C485F /* GPUImageHistogramFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF4917359529007C485F /* GPUImageHistogramFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF4517359529007C485F /* GPUImageHistogramFilter.m */; }; + BCC0DF4A17359529007C485F /* GPUImageHistogramGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF4617359529007C485F /* 
GPUImageHistogramGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF4B17359529007C485F /* GPUImageHistogramGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF4717359529007C485F /* GPUImageHistogramGenerator.m */; }; + BCC0DF52173595D3007C485F /* GPUImageAverageColor.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF4C173595D3007C485F /* GPUImageAverageColor.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF53173595D3007C485F /* GPUImageAverageColor.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF4D173595D3007C485F /* GPUImageAverageColor.m */; }; + BCC0DF54173595D3007C485F /* GPUImageLuminosity.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF4E173595D3007C485F /* GPUImageLuminosity.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF55173595D3007C485F /* GPUImageLuminosity.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF4F173595D3007C485F /* GPUImageLuminosity.m */; }; + BCC0DF56173595D3007C485F /* GPUImageSolidColorGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF50173595D3007C485F /* GPUImageSolidColorGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF57173595D3007C485F /* GPUImageSolidColorGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF51173595D3007C485F /* GPUImageSolidColorGenerator.m */; }; + BCC0DF5A173596A8007C485F /* GPUImageAdaptiveThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF58173596A8007C485F /* GPUImageAdaptiveThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF5B173596A8007C485F /* GPUImageAdaptiveThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF59173596A8007C485F /* GPUImageAdaptiveThresholdFilter.m */; }; + BCC0DF5E173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF5C173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF5F173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF5D173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.m */; }; + BCC0DF6217359F42007C485F /* GPUImageLuminanceThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCC0DF6017359F42007C485F /* GPUImageLuminanceThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCC0DF6317359F43007C485F /* GPUImageLuminanceThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC0DF6117359F42007C485F /* GPUImageLuminanceThresholdFilter.m */; }; + BCE2EF8817459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF8217459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF8917459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF8317459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.m */; }; + BCE2EF8A17459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF8417459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF8B17459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF8517459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.m */; }; + BCE2EF8C17459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF8617459D5B00C0628C /* 
GPUImageShiTomasiFeatureDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF8D17459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF8717459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.m */; }; + BCE2EF9217459D9300C0628C /* GPUImageColorPackingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF8E17459D9300C0628C /* GPUImageColorPackingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF9317459D9300C0628C /* GPUImageColorPackingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF8F17459D9300C0628C /* GPUImageColorPackingFilter.m */; }; + BCE2EF9417459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF9017459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF9517459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF9117459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.m */; }; + BCE2EF9817459E2400C0628C /* GPUImageHoughTransformLineDetector.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF9617459E2400C0628C /* GPUImageHoughTransformLineDetector.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF9917459E2400C0628C /* GPUImageHoughTransformLineDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF9717459E2400C0628C /* GPUImageHoughTransformLineDetector.m */; }; + BCE2EF9C17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE2EF9A17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE2EF9D17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE2EF9B17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.m */; }; + BCE817A81735FDC70071D084 /* GPUImageHalftoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817A01735FDC60071D084 /* GPUImageHalftoneFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817A91735FDC70071D084 /* GPUImageHalftoneFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817A11735FDC60071D084 /* GPUImageHalftoneFilter.m */; }; + BCE817AA1735FDC70071D084 /* GPUImagePixellatePositionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817A21735FDC60071D084 /* GPUImagePixellatePositionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817AB1735FDC70071D084 /* GPUImagePixellatePositionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817A31735FDC60071D084 /* GPUImagePixellatePositionFilter.m */; }; + BCE817AC1735FDC70071D084 /* GPUImagePolarPixellateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817A41735FDC70071D084 /* GPUImagePolarPixellateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817AD1735FDC70071D084 /* GPUImagePolarPixellateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817A51735FDC70071D084 /* GPUImagePolarPixellateFilter.m */; }; + BCE817AE1735FDC70071D084 /* GPUImagePolkaDotFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817A61735FDC70071D084 /* GPUImagePolkaDotFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817AF1735FDC70071D084 /* GPUImagePolkaDotFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817A71735FDC70071D084 /* GPUImagePolkaDotFilter.m */; }; + BCE817B21735FE150071D084 /* 
GPUImageCrosshatchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817B01735FE150071D084 /* GPUImageCrosshatchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817B31735FE150071D084 /* GPUImageCrosshatchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817B11735FE150071D084 /* GPUImageCrosshatchFilter.m */; }; + BCE817BA173608690071D084 /* GPUImageCannyEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817B4173608690071D084 /* GPUImageCannyEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817BB173608690071D084 /* GPUImageCannyEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817B5173608690071D084 /* GPUImageCannyEdgeDetectionFilter.m */; }; + BCE817BC173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817B6173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817BD173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817B7173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.m */; }; + BCE817BE173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817B8173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817BF173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817B9173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.m */; }; + BCE817C21736092A0071D084 /* GPUImageXYDerivativeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817C01736092A0071D084 /* GPUImageXYDerivativeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817C31736092A0071D084 /* GPUImageXYDerivativeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817C11736092A0071D084 /* GPUImageXYDerivativeFilter.m */; }; + BCE817CC17360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817C817360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817CD17360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817C917360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */; }; + BCE817CE17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817CA17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817CF17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817CB17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.m */; }; + BCE817D217360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCE817D017360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCE817D317360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCE817D117360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.m */; }; + BCF40F1217247D68005AE36A /* GPUImageContext.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F1017247D68005AE36A /* GPUImageContext.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F1317247D68005AE36A /* GPUImageContext.m in Sources */ = {isa = PBXBuildFile; fileRef = 
BCF40F1117247D68005AE36A /* GPUImageContext.m */; }; + BCF40F1617247FD8005AE36A /* GLProgram.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F1417247FD8005AE36A /* GLProgram.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F1717247FD8005AE36A /* GLProgram.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF40F1517247FD8005AE36A /* GLProgram.m */; }; + BCF40F1B1724829B005AE36A /* GPUImageOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F191724829B005AE36A /* GPUImageOutput.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F1C1724829B005AE36A /* GPUImageOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF40F1A1724829B005AE36A /* GPUImageOutput.m */; }; + BCF40F2017248322005AE36A /* GPUImageFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F1E17248322005AE36A /* GPUImageFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F2117248322005AE36A /* GPUImageFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF40F1F17248322005AE36A /* GPUImageFilter.m */; }; + BCF40F2517248836005AE36A /* GPUImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F2317248836005AE36A /* GPUImageView.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F2617248836005AE36A /* GPUImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF40F2417248836005AE36A /* GPUImageView.m */; }; + BCF40F29172488D3005AE36A /* GPUImageBrightnessFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF40F27172488D3005AE36A /* GPUImageBrightnessFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF40F2A172488D3005AE36A /* GPUImageBrightnessFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF40F28172488D3005AE36A /* GPUImageBrightnessFilter.m */; }; + BCF867561725AF9300912E34 /* GPUImageMovieWriter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867521725AF9300912E34 /* GPUImageMovieWriter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867571725AF9300912E34 /* GPUImageMovieWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867531725AF9300912E34 /* GPUImageMovieWriter.m */; }; + BCF867581725AF9300912E34 /* GPUImagePicture.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867541725AF9300912E34 /* GPUImagePicture.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867591725AF9300912E34 /* GPUImagePicture.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867551725AF9300912E34 /* GPUImagePicture.m */; }; + BCF8675B1725BBE300912E34 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCF8675A1725BBE300912E34 /* AVFoundation.framework */; }; + BCF8675D1725BBE900912E34 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCF8675C1725BBE900912E34 /* CoreMedia.framework */; }; + BCF8679D1727585D00912E34 /* GPUImageAVCamera.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8679B1727585D00912E34 /* GPUImageAVCamera.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8679E1727585D00912E34 /* GPUImageAVCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8679C1727585D00912E34 /* GPUImageAVCamera.m */; }; + BCF867AD172778D500912E34 /* GPUImageContrastFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867A3172778D500912E34 /* GPUImageContrastFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867AE172778D500912E34 /* GPUImageContrastFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867A4172778D500912E34 /* GPUImageContrastFilter.m */; }; + BCF867AF172778D500912E34 /* GPUImageExposureFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 
BCF867A5172778D500912E34 /* GPUImageExposureFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867B0172778D500912E34 /* GPUImageExposureFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867A6172778D500912E34 /* GPUImageExposureFilter.m */; }; + BCF867B1172778D500912E34 /* GPUImageHueFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867A7172778D500912E34 /* GPUImageHueFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867B2172778D500912E34 /* GPUImageHueFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867A8172778D500912E34 /* GPUImageHueFilter.m */; }; + BCF867B3172778D500912E34 /* GPUImageSaturationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867A9172778D500912E34 /* GPUImageSaturationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867B4172778D500912E34 /* GPUImageSaturationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867AA172778D500912E34 /* GPUImageSaturationFilter.m */; }; + BCF867B5172778D500912E34 /* GPUImageWhiteBalanceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867AB172778D500912E34 /* GPUImageWhiteBalanceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867B6172778D500912E34 /* GPUImageWhiteBalanceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867AC172778D500912E34 /* GPUImageWhiteBalanceFilter.m */; }; + BCF867B91727791200912E34 /* GPUImageLevelsFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867B71727791200912E34 /* GPUImageLevelsFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867BA1727791200912E34 /* GPUImageLevelsFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867B81727791200912E34 /* GPUImageLevelsFilter.m */; }; + BCF867BF1727794000912E34 /* GPUImageMonochromeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867BB1727794000912E34 /* GPUImageMonochromeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867C01727794000912E34 /* GPUImageMonochromeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867BC1727794000912E34 /* GPUImageMonochromeFilter.m */; }; + BCF867C11727794000912E34 /* GPUImageRGBFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867BD1727794000912E34 /* GPUImageRGBFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867C21727794000912E34 /* GPUImageRGBFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867BE1727794000912E34 /* GPUImageRGBFilter.m */; }; + BCF867C71727872D00912E34 /* GPUImagePixellateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867C51727872D00912E34 /* GPUImagePixellateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867C81727872D00912E34 /* GPUImagePixellateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867C61727872D00912E34 /* GPUImagePixellateFilter.m */; }; + BCF867CB172788F100912E34 /* GPUImage3x3TextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867C9172788F100912E34 /* GPUImage3x3TextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867CC172788F100912E34 /* GPUImage3x3TextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867CA172788F100912E34 /* GPUImage3x3TextureSamplingFilter.m */; }; + BCF867CF172789B200912E34 /* GPUImageSketchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867CD172789B200912E34 /* GPUImageSketchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867D0172789B200912E34 /* GPUImageSketchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867CE172789B200912E34 /* GPUImageSketchFilter.m */; 
}; + BCF867D417278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867D217278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867D517278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867D317278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.m */; }; + BCF867D817278A7A00912E34 /* GPUImageToonFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867D617278A7900912E34 /* GPUImageToonFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867D917278A7A00912E34 /* GPUImageToonFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867D717278A7900912E34 /* GPUImageToonFilter.m */; }; + BCF867DC17278C5A00912E34 /* GPUImageTwoPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867DA17278C5800912E34 /* GPUImageTwoPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867DD17278C5A00912E34 /* GPUImageTwoPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867DB17278C5900912E34 /* GPUImageTwoPassFilter.m */; }; + BCF867E017278D1900912E34 /* GPUImageGrayscaleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867DE17278D1900912E34 /* GPUImageGrayscaleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867E117278D1900912E34 /* GPUImageGrayscaleFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867DF17278D1900912E34 /* GPUImageGrayscaleFilter.m */; }; + BCF867E41727903D00912E34 /* GPUImageKuwaharaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867E21727903C00912E34 /* GPUImageKuwaharaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867E51727903D00912E34 /* GPUImageKuwaharaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867E31727903D00912E34 /* GPUImageKuwaharaFilter.m */; }; + BCF867E81728477300912E34 /* GPUImageFalseColorFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867E61728477200912E34 /* GPUImageFalseColorFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867E91728477300912E34 /* GPUImageFalseColorFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867E71728477200912E34 /* GPUImageFalseColorFilter.m */; }; + BCF867EC172847A000912E34 /* GPUImageSharpenFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867EA172847A000912E34 /* GPUImageSharpenFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867ED172847A000912E34 /* GPUImageSharpenFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867EB172847A000912E34 /* GPUImageSharpenFilter.m */; }; + BCF867F01728487100912E34 /* GPUImageUnsharpMaskFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867EE1728487000912E34 /* GPUImageUnsharpMaskFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867F11728487100912E34 /* GPUImageUnsharpMaskFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867EF1728487000912E34 /* GPUImageUnsharpMaskFilter.m */; }; + BCF867F41728490600912E34 /* GPUImageTwoInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867F21728490300912E34 /* GPUImageTwoInputFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867F51728490600912E34 /* GPUImageTwoInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867F31728490500912E34 /* GPUImageTwoInputFilter.m */; }; + BCF867F81728494C00912E34 /* GPUImageGaussianBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867F61728494B00912E34 /* GPUImageGaussianBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + 
BCF867F91728494C00912E34 /* GPUImageGaussianBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867F71728494B00912E34 /* GPUImageGaussianBlurFilter.m */; }; + BCF867FC1728496E00912E34 /* GPUImageTwoPassTextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867FA1728496A00912E34 /* GPUImageTwoPassTextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF867FD1728496E00912E34 /* GPUImageTwoPassTextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867FB1728496C00912E34 /* GPUImageTwoPassTextureSamplingFilter.m */; }; + BCF8680017284C4000912E34 /* GPUImageFilterGroup.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF867FE17284C3C00912E34 /* GPUImageFilterGroup.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8680117284C4000912E34 /* GPUImageFilterGroup.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF867FF17284C3E00912E34 /* GPUImageFilterGroup.m */; }; + BCF8680417284CFC00912E34 /* GPUImageTransformFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8680217284CFB00912E34 /* GPUImageTransformFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8680517284CFC00912E34 /* GPUImageTransformFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8680317284CFB00912E34 /* GPUImageTransformFilter.m */; }; + BCF8680817284D0B00912E34 /* GPUImageCropFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8680617284D0A00912E34 /* GPUImageCropFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8680917284D0B00912E34 /* GPUImageCropFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8680717284D0A00912E34 /* GPUImageCropFilter.m */; }; + BCF86810172853AA00912E34 /* GPUImageGaussianBlurPositionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8680C172853A500912E34 /* GPUImageGaussianBlurPositionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF86811172853AA00912E34 /* GPUImageGaussianBlurPositionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8680D172853A600912E34 /* GPUImageGaussianBlurPositionFilter.m */; }; + BCF86812172853AA00912E34 /* GPUImageGaussianSelectiveBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8680E172853A700912E34 /* GPUImageGaussianSelectiveBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF86813172853AA00912E34 /* GPUImageGaussianSelectiveBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8680F172853A800912E34 /* GPUImageGaussianSelectiveBlurFilter.m */; }; + BCF8681C172854B900912E34 /* GPUImageBilateralFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86814172854AA00912E34 /* GPUImageBilateralFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8681D172854B900912E34 /* GPUImageBilateralFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86815172854AC00912E34 /* GPUImageBilateralFilter.m */; }; + BCF8681E172854B900912E34 /* GPUImageBoxBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86816172854AE00912E34 /* GPUImageBoxBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8681F172854B900912E34 /* GPUImageBoxBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86817172854B100912E34 /* GPUImageBoxBlurFilter.m */; }; + BCF86822172854B900912E34 /* GPUImageMedianFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8681A172854B600912E34 /* GPUImageMedianFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF86823172854B900912E34 /* GPUImageMedianFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8681B172854B800912E34 /* 
GPUImageMedianFilter.m */; }; + BCF86828172857B900912E34 /* GPUImageMotionBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86824172857AD00912E34 /* GPUImageMotionBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF86829172857B900912E34 /* GPUImageMotionBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86825172857B000912E34 /* GPUImageMotionBlurFilter.m */; }; + BCF8682A172857B900912E34 /* GPUImageZoomBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86826172857B200912E34 /* GPUImageZoomBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8682B172857B900912E34 /* GPUImageZoomBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86827172857B600912E34 /* GPUImageZoomBlurFilter.m */; }; + BCF8683017286EDB00912E34 /* GPUImageAddBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8682D17286EDB00912E34 /* GPUImageAddBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8683117286EDB00912E34 /* GPUImageAddBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8682E17286EDB00912E34 /* GPUImageAddBlendFilter.m */; }; + BCF8683F17286F2800912E34 /* GPUImageColorBurnBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683317286F2800912E34 /* GPUImageColorBurnBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684017286F2800912E34 /* GPUImageColorBurnBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683417286F2800912E34 /* GPUImageColorBurnBlendFilter.m */; }; + BCF8684117286F2800912E34 /* GPUImageDarkenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683517286F2800912E34 /* GPUImageDarkenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684217286F2800912E34 /* GPUImageDarkenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683617286F2800912E34 /* GPUImageDarkenBlendFilter.m */; }; + BCF8684317286F2800912E34 /* GPUImageDivideBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683717286F2800912E34 /* GPUImageDivideBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684417286F2800912E34 /* GPUImageDivideBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683817286F2800912E34 /* GPUImageDivideBlendFilter.m */; }; + BCF8684517286F2800912E34 /* GPUImageLightenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683917286F2800912E34 /* GPUImageLightenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684617286F2800912E34 /* GPUImageLightenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683A17286F2800912E34 /* GPUImageLightenBlendFilter.m */; }; + BCF8684717286F2800912E34 /* GPUImageMultiplyBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683B17286F2800912E34 /* GPUImageMultiplyBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684817286F2800912E34 /* GPUImageMultiplyBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683C17286F2800912E34 /* GPUImageMultiplyBlendFilter.m */; }; + BCF8684917286F2800912E34 /* GPUImageOverlayBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8683D17286F2800912E34 /* GPUImageOverlayBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8684A17286F2800912E34 /* GPUImageOverlayBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8683E17286F2800912E34 /* GPUImageOverlayBlendFilter.m */; }; + BCF868531728716400912E34 /* GPUImageColorDodgeBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8684B1728715800912E34 /* 
GPUImageColorDodgeBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868541728716400912E34 /* GPUImageColorDodgeBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8684C1728715900912E34 /* GPUImageColorDodgeBlendFilter.m */; }; + BCF868551728716400912E34 /* GPUImageDifferenceBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8684D1728715C00912E34 /* GPUImageDifferenceBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868561728716400912E34 /* GPUImageDifferenceBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8684E1728715D00912E34 /* GPUImageDifferenceBlendFilter.m */; }; + BCF868571728716400912E34 /* GPUImageLinearBurnBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8684F1728715F00912E34 /* GPUImageLinearBurnBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868581728716400912E34 /* GPUImageLinearBurnBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868501728716000912E34 /* GPUImageLinearBurnBlendFilter.m */; }; + BCF868591728716400912E34 /* GPUImageScreenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868511728716200912E34 /* GPUImageScreenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8685A1728716400912E34 /* GPUImageScreenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868521728716300912E34 /* GPUImageScreenBlendFilter.m */; }; + BCF8686F1728721300912E34 /* GPUImageColorBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8685B172871EE00912E34 /* GPUImageColorBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868701728721300912E34 /* GPUImageColorBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8685C172871F000912E34 /* GPUImageColorBlendFilter.m */; }; + BCF868711728721300912E34 /* GPUImageExclusionBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8685D172871F300912E34 /* GPUImageExclusionBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868721728721300912E34 /* GPUImageExclusionBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8685E172871F500912E34 /* GPUImageExclusionBlendFilter.m */; }; + BCF868731728721300912E34 /* GPUImageHardLightBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8685F172871F800912E34 /* GPUImageHardLightBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868741728721300912E34 /* GPUImageHardLightBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86860172871FB00912E34 /* GPUImageHardLightBlendFilter.m */; }; + BCF868751728721300912E34 /* GPUImageHueBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86861172871FC00912E34 /* GPUImageHueBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868761728721300912E34 /* GPUImageHueBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86862172871FD00912E34 /* GPUImageHueBlendFilter.m */; }; + BCF868771728721300912E34 /* GPUImageLuminosityBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86863172871FF00912E34 /* GPUImageLuminosityBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868781728721300912E34 /* GPUImageLuminosityBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868641728720100912E34 /* GPUImageLuminosityBlendFilter.m */; }; + BCF868791728721300912E34 /* GPUImageNormalBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868651728720200912E34 /* GPUImageNormalBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8687A1728721300912E34 
/* GPUImageNormalBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868661728720400912E34 /* GPUImageNormalBlendFilter.m */; }; + BCF8687B1728721300912E34 /* GPUImagePoissonBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868671728720600912E34 /* GPUImagePoissonBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8687C1728721300912E34 /* GPUImagePoissonBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868681728720700912E34 /* GPUImagePoissonBlendFilter.m */; }; + BCF8687D1728721300912E34 /* GPUImageSaturationBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868691728720900912E34 /* GPUImageSaturationBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8687E1728721300912E34 /* GPUImageSaturationBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8686A1728720B00912E34 /* GPUImageSaturationBlendFilter.m */; }; + BCF8687F1728721300912E34 /* GPUImageSoftLightBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8686B1728720C00912E34 /* GPUImageSoftLightBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868801728721300912E34 /* GPUImageSoftLightBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8686C1728720E00912E34 /* GPUImageSoftLightBlendFilter.m */; }; + BCF868811728721300912E34 /* GPUImageSubtractBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8686D1728720F00912E34 /* GPUImageSubtractBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868821728721300912E34 /* GPUImageSubtractBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8686E1728721100912E34 /* GPUImageSubtractBlendFilter.m */; }; + BCF868851728773900912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868831728771D00912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868861728773900912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868841728772600912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.m */; }; + BCF8688F17287ED000912E34 /* GPUImageChromaKeyBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8688B17287EB500912E34 /* GPUImageChromaKeyBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8689017287ED000912E34 /* GPUImageChromaKeyBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8688C17287EBB00912E34 /* GPUImageChromaKeyBlendFilter.m */; }; + BCF8689117287ED000912E34 /* GPUImageDissolveBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8688D17287EC400912E34 /* GPUImageDissolveBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8689217287ED000912E34 /* GPUImageDissolveBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8688E17287ECC00912E34 /* GPUImageDissolveBlendFilter.m */; }; + BCF86895172880B800912E34 /* GPUImageSourceOverBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF86893172880AB00912E34 /* GPUImageSourceOverBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF86896172880B800912E34 /* GPUImageSourceOverBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF86894172880B100912E34 /* GPUImageSourceOverBlendFilter.m */; }; + BCF86899172883A300912E34 /* GPUImageMaskFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF868971728839800912E34 /* GPUImageMaskFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8689A172883A300912E34 /* GPUImageMaskFilter.m in Sources */ = 
{isa = PBXBuildFile; fileRef = BCF868981728839C00912E34 /* GPUImageMaskFilter.m */; }; + BCF8689D1728862100912E34 /* GPUImageOpacityFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8689B1728861E00912E34 /* GPUImageOpacityFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF8689E1728862100912E34 /* GPUImageOpacityFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF8689C1728861F00912E34 /* GPUImageOpacityFilter.m */; }; + BCF868A11728866400912E34 /* GPUImageAlphaBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF8689F1728865500912E34 /* GPUImageAlphaBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; + BCF868A21728866400912E34 /* GPUImageAlphaBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF868A01728865D00912E34 /* GPUImageAlphaBlendFilter.m */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + A87E5E10177648F3007FD5B1 /* GPUImageRawDataOutput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRawDataOutput.h; path = Source/GPUImageRawDataOutput.h; sourceTree = SOURCE_ROOT; }; + A87E5E11177648F3007FD5B1 /* GPUImageRawDataOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRawDataOutput.m; path = Source/GPUImageRawDataOutput.m; sourceTree = SOURCE_ROOT; }; + A87E5E1417764B16007FD5B1 /* GPUImageRawDataInput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRawDataInput.h; path = Source/GPUImageRawDataInput.h; sourceTree = SOURCE_ROOT; }; + A87E5E1517764B16007FD5B1 /* GPUImageRawDataInput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRawDataInput.m; path = Source/GPUImageRawDataInput.m; sourceTree = SOURCE_ROOT; }; + BC569576174683620081491B /* GPUImageCrosshairGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageCrosshairGenerator.h; path = Source/GPUImageCrosshairGenerator.h; sourceTree = SOURCE_ROOT; }; + BC569577174683620081491B /* GPUImageCrosshairGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageCrosshairGenerator.m; path = Source/GPUImageCrosshairGenerator.m; sourceTree = SOURCE_ROOT; }; + BC569578174683620081491B /* GPUImageLineGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLineGenerator.h; path = Source/GPUImageLineGenerator.h; sourceTree = SOURCE_ROOT; }; + BC569579174683620081491B /* GPUImageLineGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLineGenerator.m; path = Source/GPUImageLineGenerator.m; sourceTree = SOURCE_ROOT; }; + BC6C553A1730636600EB222D /* GPUImageLaplacianFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLaplacianFilter.h; path = Source/GPUImageLaplacianFilter.h; sourceTree = SOURCE_ROOT; }; + BC6C553B1730636600EB222D /* GPUImageLaplacianFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLaplacianFilter.m; path = Source/GPUImageLaplacianFilter.m; sourceTree = SOURCE_ROOT; }; + BC6C55441730BDCF00EB222D /* GPUImageLanczosResamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLanczosResamplingFilter.h; path = 
Source/GPUImageLanczosResamplingFilter.h; sourceTree = SOURCE_ROOT; }; + BC6C55451730BDCF00EB222D /* GPUImageLanczosResamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLanczosResamplingFilter.m; path = Source/GPUImageLanczosResamplingFilter.m; sourceTree = SOURCE_ROOT; }; + BC78B706172DCCB800342C6A /* GPUImageAmatorkaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAmatorkaFilter.h; path = Source/GPUImageAmatorkaFilter.h; sourceTree = SOURCE_ROOT; }; + BC78B707172DCCB800342C6A /* GPUImageAmatorkaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAmatorkaFilter.m; path = Source/GPUImageAmatorkaFilter.m; sourceTree = SOURCE_ROOT; }; + BC78B708172DCCB800342C6A /* GPUImageLookupFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLookupFilter.h; path = Source/GPUImageLookupFilter.h; sourceTree = SOURCE_ROOT; }; + BC78B709172DCCB800342C6A /* GPUImageLookupFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLookupFilter.m; path = Source/GPUImageLookupFilter.m; sourceTree = SOURCE_ROOT; }; + BC78B70A172DCCB800342C6A /* GPUImageMissEtikateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMissEtikateFilter.h; path = Source/GPUImageMissEtikateFilter.h; sourceTree = SOURCE_ROOT; }; + BC78B70B172DCCB800342C6A /* GPUImageMissEtikateFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMissEtikateFilter.m; path = Source/GPUImageMissEtikateFilter.m; sourceTree = SOURCE_ROOT; }; + BC78B70C172DCCB800342C6A /* GPUImageSoftEleganceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSoftEleganceFilter.h; path = Source/GPUImageSoftEleganceFilter.h; sourceTree = SOURCE_ROOT; }; + BC78B70D172DCCB800342C6A /* GPUImageSoftEleganceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSoftEleganceFilter.m; path = Source/GPUImageSoftEleganceFilter.m; sourceTree = SOURCE_ROOT; }; + BC8A583018124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSingleComponentGaussianBlurFilter.h; path = Source/GPUImageSingleComponentGaussianBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BC8A583118124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSingleComponentGaussianBlurFilter.m; path = Source/GPUImageSingleComponentGaussianBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BC96A4EF176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageNonMaximumSuppressionFilter.h; path = Source/GPUImageNonMaximumSuppressionFilter.h; sourceTree = SOURCE_ROOT; }; + BC96A4F0176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageNonMaximumSuppressionFilter.m; path = Source/GPUImageNonMaximumSuppressionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B4F1746C9EC0097C84A /* GPUImageBuffer.h */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBuffer.h; path = Source/GPUImageBuffer.h; sourceTree = SOURCE_ROOT; }; + BCA20B501746C9EC0097C84A /* GPUImageBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBuffer.m; path = Source/GPUImageBuffer.m; sourceTree = SOURCE_ROOT; }; + BCA20B511746C9EC0097C84A /* GPUImageHighPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHighPassFilter.h; path = Source/GPUImageHighPassFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B521746C9EC0097C84A /* GPUImageHighPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHighPassFilter.m; path = Source/GPUImageHighPassFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B531746C9EC0097C84A /* GPUImageLowPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLowPassFilter.h; path = Source/GPUImageLowPassFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B541746C9EC0097C84A /* GPUImageLowPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLowPassFilter.m; path = Source/GPUImageLowPassFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B551746C9EC0097C84A /* GPUImageMotionDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMotionDetector.h; path = Source/GPUImageMotionDetector.h; sourceTree = SOURCE_ROOT; }; + BCA20B561746C9EC0097C84A /* GPUImageMotionDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMotionDetector.m; path = Source/GPUImageMotionDetector.m; sourceTree = SOURCE_ROOT; }; + BCA20B5F1746CFE60097C84A /* GPUImageThresholdSketchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageThresholdSketchFilter.h; path = Source/GPUImageThresholdSketchFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B601746CFE60097C84A /* GPUImageThresholdSketchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageThresholdSketchFilter.m; path = Source/GPUImageThresholdSketchFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B631746D3C30097C84A /* GPUImageSmoothToonFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSmoothToonFilter.h; path = Source/GPUImageSmoothToonFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B641746D3C30097C84A /* GPUImageSmoothToonFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSmoothToonFilter.m; path = Source/GPUImageSmoothToonFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B671746D3DC0097C84A /* GPUImageTiltShiftFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTiltShiftFilter.h; path = Source/GPUImageTiltShiftFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B681746D3DC0097C84A /* GPUImageTiltShiftFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTiltShiftFilter.m; path = Source/GPUImageTiltShiftFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B6B1746D44A0097C84A /* GPUImageCGAColorspaceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageCGAColorspaceFilter.h; path = Source/GPUImageCGAColorspaceFilter.h; 
sourceTree = SOURCE_ROOT; }; + BCA20B6C1746D44A0097C84A /* GPUImageCGAColorspaceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageCGAColorspaceFilter.m; path = Source/GPUImageCGAColorspaceFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B6F1746D4AC0097C84A /* GPUImagePosterizeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePosterizeFilter.h; path = Source/GPUImagePosterizeFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B701746D4AC0097C84A /* GPUImagePosterizeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePosterizeFilter.m; path = Source/GPUImagePosterizeFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B731746DE850097C84A /* GPUImageKuwaharaRadius3Filter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageKuwaharaRadius3Filter.h; path = Source/GPUImageKuwaharaRadius3Filter.h; sourceTree = SOURCE_ROOT; }; + BCA20B741746DE850097C84A /* GPUImageKuwaharaRadius3Filter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageKuwaharaRadius3Filter.m; path = Source/GPUImageKuwaharaRadius3Filter.m; sourceTree = SOURCE_ROOT; }; + BCA20B771746DEDD0097C84A /* GPUImageChromaKeyFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageChromaKeyFilter.h; path = Source/GPUImageChromaKeyFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B781746DEDE0097C84A /* GPUImageChromaKeyFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageChromaKeyFilter.m; path = Source/GPUImageChromaKeyFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B7B1746DF200097C84A /* GPUImageVignetteFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageVignetteFilter.h; path = Source/GPUImageVignetteFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B7C1746DF200097C84A /* GPUImageVignetteFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageVignetteFilter.m; path = Source/GPUImageVignetteFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B7F1747080E0097C84A /* GPUImageBulgeDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBulgeDistortionFilter.h; path = Source/GPUImageBulgeDistortionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B801747080F0097C84A /* GPUImageBulgeDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBulgeDistortionFilter.m; path = Source/GPUImageBulgeDistortionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B811747080F0097C84A /* GPUImageClosingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageClosingFilter.h; path = Source/GPUImageClosingFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B82174708100097C84A /* GPUImageClosingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageClosingFilter.m; path = Source/GPUImageClosingFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B83174708110097C84A /* GPUImageDilationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageDilationFilter.h; path = Source/GPUImageDilationFilter.h; sourceTree = SOURCE_ROOT; }; + 
BCA20B84174708120097C84A /* GPUImageDilationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDilationFilter.m; path = Source/GPUImageDilationFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B85174708120097C84A /* GPUImageErosionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageErosionFilter.h; path = Source/GPUImageErosionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B86174708130097C84A /* GPUImageErosionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageErosionFilter.m; path = Source/GPUImageErosionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B87174708140097C84A /* GPUImageGlassSphereFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGlassSphereFilter.h; path = Source/GPUImageGlassSphereFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B88174708150097C84A /* GPUImageGlassSphereFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGlassSphereFilter.m; path = Source/GPUImageGlassSphereFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B89174708150097C84A /* GPUImagePinchDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePinchDistortionFilter.h; path = Source/GPUImagePinchDistortionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B8A174708150097C84A /* GPUImagePinchDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePinchDistortionFilter.m; path = Source/GPUImagePinchDistortionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B8B174708160097C84A /* GPUImageRGBClosingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBClosingFilter.h; path = Source/GPUImageRGBClosingFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B8C174708160097C84A /* GPUImageRGBClosingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBClosingFilter.m; path = Source/GPUImageRGBClosingFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B8D174708190097C84A /* GPUImageRGBDilationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBDilationFilter.h; path = Source/GPUImageRGBDilationFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B8E174708190097C84A /* GPUImageRGBDilationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBDilationFilter.m; path = Source/GPUImageRGBDilationFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B8F174708190097C84A /* GPUImageRGBErosionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBErosionFilter.h; path = Source/GPUImageRGBErosionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B901747081A0097C84A /* GPUImageRGBErosionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBErosionFilter.m; path = Source/GPUImageRGBErosionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B911747081A0097C84A /* GPUImageRGBOpeningFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBOpeningFilter.h; path = Source/GPUImageRGBOpeningFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B921747081B0097C84A /* GPUImageRGBOpeningFilter.m */ 
= {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBOpeningFilter.m; path = Source/GPUImageRGBOpeningFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B931747081B0097C84A /* GPUImageSphereRefractionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSphereRefractionFilter.h; path = Source/GPUImageSphereRefractionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B941747081B0097C84A /* GPUImageSphereRefractionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSphereRefractionFilter.m; path = Source/GPUImageSphereRefractionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B951747081C0097C84A /* GPUImageStretchDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageStretchDistortionFilter.h; path = Source/GPUImageStretchDistortionFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B961747081C0097C84A /* GPUImageStretchDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageStretchDistortionFilter.m; path = Source/GPUImageStretchDistortionFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20B971747081C0097C84A /* GPUImageSwirlFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSwirlFilter.h; path = Source/GPUImageSwirlFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20B981747081D0097C84A /* GPUImageSwirlFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSwirlFilter.m; path = Source/GPUImageSwirlFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BB3174708610097C84A /* GPUImageOpeningFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageOpeningFilter.h; path = Source/GPUImageOpeningFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BB4174708620097C84A /* GPUImageOpeningFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageOpeningFilter.m; path = Source/GPUImageOpeningFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BB717471C3A0097C84A /* GPUImageJFAVoronoiFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageJFAVoronoiFilter.h; path = Source/GPUImageJFAVoronoiFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BB817471C3C0097C84A /* GPUImageJFAVoronoiFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageJFAVoronoiFilter.m; path = Source/GPUImageJFAVoronoiFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BB917471C3D0097C84A /* GPUImageLocalBinaryPatternFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLocalBinaryPatternFilter.h; path = Source/GPUImageLocalBinaryPatternFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BBA17471C3F0097C84A /* GPUImageLocalBinaryPatternFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLocalBinaryPatternFilter.m; path = Source/GPUImageLocalBinaryPatternFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BBB17471C400097C84A /* GPUImageMosaicFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMosaicFilter.h; path = Source/GPUImageMosaicFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BBC17471C420097C84A /* GPUImageMosaicFilter.m */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMosaicFilter.m; path = Source/GPUImageMosaicFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BBD17471C440097C84A /* GPUImagePerlinNoiseFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePerlinNoiseFilter.h; path = Source/GPUImagePerlinNoiseFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BBE17471C460097C84A /* GPUImagePerlinNoiseFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePerlinNoiseFilter.m; path = Source/GPUImagePerlinNoiseFilter.m; sourceTree = SOURCE_ROOT; }; + BCA20BBF17471C480097C84A /* GPUImageVoronoiConsumerFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageVoronoiConsumerFilter.h; path = Source/GPUImageVoronoiConsumerFilter.h; sourceTree = SOURCE_ROOT; }; + BCA20BC017471C490097C84A /* GPUImageVoronoiConsumerFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageVoronoiConsumerFilter.m; path = Source/GPUImageVoronoiConsumerFilter.m; sourceTree = SOURCE_ROOT; }; + BCA3F31C17239B6500E28AEC /* GPUImage.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = GPUImage.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + BCA3F31F17239B6500E28AEC /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = System/Library/Frameworks/Cocoa.framework; sourceTree = SDKROOT; }; + BCA3F32217239B6500E28AEC /* AppKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AppKit.framework; path = System/Library/Frameworks/AppKit.framework; sourceTree = SDKROOT; }; + BCA3F32317239B6500E28AEC /* CoreData.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreData.framework; path = System/Library/Frameworks/CoreData.framework; sourceTree = SDKROOT; }; + BCA3F32417239B6500E28AEC /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; + BCA3F32717239B6500E28AEC /* GPUImageMac-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; name = "GPUImageMac-Info.plist"; path = "Source/Mac/GPUImageMac-Info.plist"; sourceTree = "<group>"; }; + BCA3F32B17239B6500E28AEC /* GPUImageMac-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImageMac-Prefix.pch"; path = "Source/Mac/GPUImageMac-Prefix.pch"; sourceTree = "<group>"; }; + BCA3F3601723A8FA00E28AEC /* OpenGL.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGL.framework; path = System/Library/Frameworks/OpenGL.framework; sourceTree = SDKROOT; }; + BCA3F3611723A8FA00E28AEC /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; }; + BCA3F3641723A96600E28AEC /* GPUImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImage.h; path = Source/Mac/GPUImage.h; sourceTree = SOURCE_ROOT; }; + BCAE41E21731C3000020F80D /* GPUImageThreeInputFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageThreeInputFilter.h; path = 
Source/GPUImageThreeInputFilter.h; sourceTree = SOURCE_ROOT; }; + BCAE41E31731C3000020F80D /* GPUImageThreeInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageThreeInputFilter.m; path = Source/GPUImageThreeInputFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009E4172A1BBD00DB804C /* GPUImageGammaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGammaFilter.h; path = Source/GPUImageGammaFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009E5172A1BBD00DB804C /* GPUImageGammaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGammaFilter.m; path = Source/GPUImageGammaFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009E8172A1BE800DB804C /* GPUImageToneCurveFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageToneCurveFilter.h; path = Source/GPUImageToneCurveFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009E9172A1BE800DB804C /* GPUImageToneCurveFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageToneCurveFilter.m; path = Source/GPUImageToneCurveFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009EC172A23A900DB804C /* GPUImageHazeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHazeFilter.h; path = Source/GPUImageHazeFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009ED172A23A900DB804C /* GPUImageHazeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHazeFilter.m; path = Source/GPUImageHazeFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009EE172A23AA00DB804C /* GPUImageHighlightShadowFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHighlightShadowFilter.h; path = Source/GPUImageHighlightShadowFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009EF172A23AA00DB804C /* GPUImageHighlightShadowFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHighlightShadowFilter.m; path = Source/GPUImageHighlightShadowFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009F4172A243300DB804C /* GPUImageSepiaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSepiaFilter.h; path = Source/GPUImageSepiaFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009F5172A243400DB804C /* GPUImageSepiaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSepiaFilter.m; path = Source/GPUImageSepiaFilter.m; sourceTree = SOURCE_ROOT; }; + BCB009F8172A246D00DB804C /* GPUImageColorMatrixFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorMatrixFilter.h; path = Source/GPUImageColorMatrixFilter.h; sourceTree = SOURCE_ROOT; }; + BCB009F9172A246E00DB804C /* GPUImageColorMatrixFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorMatrixFilter.m; path = Source/GPUImageColorMatrixFilter.m; sourceTree = SOURCE_ROOT; }; + BCBF692F173061D200E5792A /* GPUImage3x3ConvolutionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImage3x3ConvolutionFilter.h; path = Source/GPUImage3x3ConvolutionFilter.h; sourceTree = SOURCE_ROOT; }; + BCBF6930173061D200E5792A /* GPUImage3x3ConvolutionFilter.m */ = 
{isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImage3x3ConvolutionFilter.m; path = Source/GPUImage3x3ConvolutionFilter.m; sourceTree = SOURCE_ROOT; }; + BCBF69331730623200E5792A /* GPUImageEmbossFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageEmbossFilter.h; path = Source/GPUImageEmbossFilter.h; sourceTree = SOURCE_ROOT; }; + BCBF69341730623200E5792A /* GPUImageEmbossFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageEmbossFilter.m; path = Source/GPUImageEmbossFilter.m; sourceTree = SOURCE_ROOT; }; + BCC0DF4017358315007C485F /* GPUImageColorInvertFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorInvertFilter.h; path = Source/GPUImageColorInvertFilter.h; sourceTree = SOURCE_ROOT; }; + BCC0DF4117358315007C485F /* GPUImageColorInvertFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorInvertFilter.m; path = Source/GPUImageColorInvertFilter.m; sourceTree = SOURCE_ROOT; }; + BCC0DF4417359529007C485F /* GPUImageHistogramFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHistogramFilter.h; path = Source/GPUImageHistogramFilter.h; sourceTree = SOURCE_ROOT; }; + BCC0DF4517359529007C485F /* GPUImageHistogramFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHistogramFilter.m; path = Source/GPUImageHistogramFilter.m; sourceTree = SOURCE_ROOT; }; + BCC0DF4617359529007C485F /* GPUImageHistogramGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHistogramGenerator.h; path = Source/GPUImageHistogramGenerator.h; sourceTree = SOURCE_ROOT; }; + BCC0DF4717359529007C485F /* GPUImageHistogramGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHistogramGenerator.m; path = Source/GPUImageHistogramGenerator.m; sourceTree = SOURCE_ROOT; }; + BCC0DF4C173595D3007C485F /* GPUImageAverageColor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAverageColor.h; path = Source/GPUImageAverageColor.h; sourceTree = SOURCE_ROOT; }; + BCC0DF4D173595D3007C485F /* GPUImageAverageColor.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAverageColor.m; path = Source/GPUImageAverageColor.m; sourceTree = SOURCE_ROOT; }; + BCC0DF4E173595D3007C485F /* GPUImageLuminosity.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLuminosity.h; path = Source/GPUImageLuminosity.h; sourceTree = SOURCE_ROOT; }; + BCC0DF4F173595D3007C485F /* GPUImageLuminosity.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLuminosity.m; path = Source/GPUImageLuminosity.m; sourceTree = SOURCE_ROOT; }; + BCC0DF50173595D3007C485F /* GPUImageSolidColorGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSolidColorGenerator.h; path = Source/GPUImageSolidColorGenerator.h; sourceTree = SOURCE_ROOT; }; + BCC0DF51173595D3007C485F /* GPUImageSolidColorGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSolidColorGenerator.m; path = 
Source/GPUImageSolidColorGenerator.m; sourceTree = SOURCE_ROOT; }; + BCC0DF58173596A8007C485F /* GPUImageAdaptiveThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAdaptiveThresholdFilter.h; path = Source/GPUImageAdaptiveThresholdFilter.h; sourceTree = SOURCE_ROOT; }; + BCC0DF59173596A8007C485F /* GPUImageAdaptiveThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAdaptiveThresholdFilter.m; path = Source/GPUImageAdaptiveThresholdFilter.m; sourceTree = SOURCE_ROOT; }; + BCC0DF5C173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAverageLuminanceThresholdFilter.h; path = Source/GPUImageAverageLuminanceThresholdFilter.h; sourceTree = SOURCE_ROOT; }; + BCC0DF5D173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAverageLuminanceThresholdFilter.m; path = Source/GPUImageAverageLuminanceThresholdFilter.m; sourceTree = SOURCE_ROOT; }; + BCC0DF6017359F42007C485F /* GPUImageLuminanceThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLuminanceThresholdFilter.h; path = Source/GPUImageLuminanceThresholdFilter.h; sourceTree = SOURCE_ROOT; }; + BCC0DF6117359F42007C485F /* GPUImageLuminanceThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLuminanceThresholdFilter.m; path = Source/GPUImageLuminanceThresholdFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF8217459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHarrisCornerDetectionFilter.h; path = Source/GPUImageHarrisCornerDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF8317459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHarrisCornerDetectionFilter.m; path = Source/GPUImageHarrisCornerDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF8417459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageNobleCornerDetectionFilter.h; path = Source/GPUImageNobleCornerDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF8517459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageNobleCornerDetectionFilter.m; path = Source/GPUImageNobleCornerDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF8617459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageShiTomasiFeatureDetectionFilter.h; path = Source/GPUImageShiTomasiFeatureDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF8717459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageShiTomasiFeatureDetectionFilter.m; path = Source/GPUImageShiTomasiFeatureDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF8E17459D9300C0628C /* GPUImageColorPackingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.c.h; name = GPUImageColorPackingFilter.h; path = Source/GPUImageColorPackingFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF8F17459D9300C0628C /* GPUImageColorPackingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorPackingFilter.m; path = Source/GPUImageColorPackingFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF9017459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageThresholdedNonMaximumSuppressionFilter.h; path = Source/GPUImageThresholdedNonMaximumSuppressionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF9117459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageThresholdedNonMaximumSuppressionFilter.m; path = Source/GPUImageThresholdedNonMaximumSuppressionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE2EF9617459E2400C0628C /* GPUImageHoughTransformLineDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHoughTransformLineDetector.h; path = Source/GPUImageHoughTransformLineDetector.h; sourceTree = SOURCE_ROOT; }; + BCE2EF9717459E2400C0628C /* GPUImageHoughTransformLineDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHoughTransformLineDetector.m; path = Source/GPUImageHoughTransformLineDetector.m; sourceTree = SOURCE_ROOT; }; + BCE2EF9A17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageParallelCoordinateLineTransformFilter.h; path = Source/GPUImageParallelCoordinateLineTransformFilter.h; sourceTree = SOURCE_ROOT; }; + BCE2EF9B17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageParallelCoordinateLineTransformFilter.m; path = Source/GPUImageParallelCoordinateLineTransformFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817A01735FDC60071D084 /* GPUImageHalftoneFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHalftoneFilter.h; path = Source/GPUImageHalftoneFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817A11735FDC60071D084 /* GPUImageHalftoneFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHalftoneFilter.m; path = Source/GPUImageHalftoneFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817A21735FDC60071D084 /* GPUImagePixellatePositionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePixellatePositionFilter.h; path = Source/GPUImagePixellatePositionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817A31735FDC60071D084 /* GPUImagePixellatePositionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePixellatePositionFilter.m; path = Source/GPUImagePixellatePositionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817A41735FDC70071D084 /* GPUImagePolarPixellateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePolarPixellateFilter.h; path = Source/GPUImagePolarPixellateFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817A51735FDC70071D084 /* GPUImagePolarPixellateFilter.m 
*/ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePolarPixellateFilter.m; path = Source/GPUImagePolarPixellateFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817A61735FDC70071D084 /* GPUImagePolkaDotFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePolkaDotFilter.h; path = Source/GPUImagePolkaDotFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817A71735FDC70071D084 /* GPUImagePolkaDotFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePolkaDotFilter.m; path = Source/GPUImagePolkaDotFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817B01735FE150071D084 /* GPUImageCrosshatchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageCrosshatchFilter.h; path = Source/GPUImageCrosshatchFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817B11735FE150071D084 /* GPUImageCrosshatchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageCrosshatchFilter.m; path = Source/GPUImageCrosshatchFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817B4173608690071D084 /* GPUImageCannyEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageCannyEdgeDetectionFilter.h; path = Source/GPUImageCannyEdgeDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817B5173608690071D084 /* GPUImageCannyEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageCannyEdgeDetectionFilter.m; path = Source/GPUImageCannyEdgeDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817B6173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePrewittEdgeDetectionFilter.h; path = Source/GPUImagePrewittEdgeDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817B7173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePrewittEdgeDetectionFilter.m; path = Source/GPUImagePrewittEdgeDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817B8173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageThresholdEdgeDetectionFilter.h; path = Source/GPUImageThresholdEdgeDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817B9173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageThresholdEdgeDetectionFilter.m; path = Source/GPUImageThresholdEdgeDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817C01736092A0071D084 /* GPUImageXYDerivativeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageXYDerivativeFilter.h; path = Source/GPUImageXYDerivativeFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817C11736092A0071D084 /* GPUImageXYDerivativeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageXYDerivativeFilter.m; path = Source/GPUImageXYDerivativeFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817C817360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = 
GPUImageDirectionalNonMaximumSuppressionFilter.h; path = Source/GPUImageDirectionalNonMaximumSuppressionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817C917360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDirectionalNonMaximumSuppressionFilter.m; path = Source/GPUImageDirectionalNonMaximumSuppressionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817CA17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUimageDirectionalSobelEdgeDetectionFilter.h; path = Source/GPUimageDirectionalSobelEdgeDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817CB17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUimageDirectionalSobelEdgeDetectionFilter.m; path = Source/GPUimageDirectionalSobelEdgeDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCE817D017360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageWeakPixelInclusionFilter.h; path = Source/GPUImageWeakPixelInclusionFilter.h; sourceTree = SOURCE_ROOT; }; + BCE817D117360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageWeakPixelInclusionFilter.m; path = Source/GPUImageWeakPixelInclusionFilter.m; sourceTree = SOURCE_ROOT; }; + BCF40F1017247D68005AE36A /* GPUImageContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageContext.h; path = Source/Mac/GPUImageContext.h; sourceTree = SOURCE_ROOT; }; + BCF40F1117247D68005AE36A /* GPUImageContext.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageContext.m; path = Source/Mac/GPUImageContext.m; sourceTree = SOURCE_ROOT; }; + BCF40F1417247FD8005AE36A /* GLProgram.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GLProgram.h; path = Source/GLProgram.h; sourceTree = SOURCE_ROOT; }; + BCF40F1517247FD8005AE36A /* GLProgram.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GLProgram.m; path = Source/GLProgram.m; sourceTree = SOURCE_ROOT; }; + BCF40F191724829B005AE36A /* GPUImageOutput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageOutput.h; path = Source/GPUImageOutput.h; sourceTree = SOURCE_ROOT; }; + BCF40F1A1724829B005AE36A /* GPUImageOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageOutput.m; path = Source/GPUImageOutput.m; sourceTree = SOURCE_ROOT; }; + BCF40F1E17248322005AE36A /* GPUImageFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageFilter.h; path = Source/GPUImageFilter.h; sourceTree = SOURCE_ROOT; }; + BCF40F1F17248322005AE36A /* GPUImageFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageFilter.m; path = Source/GPUImageFilter.m; sourceTree = SOURCE_ROOT; }; + BCF40F2317248836005AE36A /* GPUImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageView.h; path = Source/Mac/GPUImageView.h; sourceTree = 
SOURCE_ROOT; }; + BCF40F2417248836005AE36A /* GPUImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageView.m; path = Source/Mac/GPUImageView.m; sourceTree = SOURCE_ROOT; }; + BCF40F27172488D3005AE36A /* GPUImageBrightnessFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBrightnessFilter.h; path = Source/GPUImageBrightnessFilter.h; sourceTree = SOURCE_ROOT; }; + BCF40F28172488D3005AE36A /* GPUImageBrightnessFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBrightnessFilter.m; path = Source/GPUImageBrightnessFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867521725AF9300912E34 /* GPUImageMovieWriter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMovieWriter.h; path = Source/Mac/GPUImageMovieWriter.h; sourceTree = SOURCE_ROOT; }; + BCF867531725AF9300912E34 /* GPUImageMovieWriter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMovieWriter.m; path = Source/Mac/GPUImageMovieWriter.m; sourceTree = SOURCE_ROOT; }; + BCF867541725AF9300912E34 /* GPUImagePicture.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePicture.h; path = Source/Mac/GPUImagePicture.h; sourceTree = SOURCE_ROOT; }; + BCF867551725AF9300912E34 /* GPUImagePicture.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePicture.m; path = Source/Mac/GPUImagePicture.m; sourceTree = SOURCE_ROOT; }; + BCF8675A1725BBE300912E34 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; + BCF8675C1725BBE900912E34 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; + BCF8679B1727585D00912E34 /* GPUImageAVCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAVCamera.h; path = Source/Mac/GPUImageAVCamera.h; sourceTree = SOURCE_ROOT; }; + BCF8679C1727585D00912E34 /* GPUImageAVCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAVCamera.m; path = Source/Mac/GPUImageAVCamera.m; sourceTree = SOURCE_ROOT; }; + BCF867A3172778D500912E34 /* GPUImageContrastFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageContrastFilter.h; path = Source/GPUImageContrastFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867A4172778D500912E34 /* GPUImageContrastFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageContrastFilter.m; path = Source/GPUImageContrastFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867A5172778D500912E34 /* GPUImageExposureFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageExposureFilter.h; path = Source/GPUImageExposureFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867A6172778D500912E34 /* GPUImageExposureFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageExposureFilter.m; path = Source/GPUImageExposureFilter.m; sourceTree = SOURCE_ROOT; }; + 
BCF867A7172778D500912E34 /* GPUImageHueFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHueFilter.h; path = Source/GPUImageHueFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867A8172778D500912E34 /* GPUImageHueFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHueFilter.m; path = Source/GPUImageHueFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867A9172778D500912E34 /* GPUImageSaturationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSaturationFilter.h; path = Source/GPUImageSaturationFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867AA172778D500912E34 /* GPUImageSaturationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSaturationFilter.m; path = Source/GPUImageSaturationFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867AB172778D500912E34 /* GPUImageWhiteBalanceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageWhiteBalanceFilter.h; path = Source/GPUImageWhiteBalanceFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867AC172778D500912E34 /* GPUImageWhiteBalanceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageWhiteBalanceFilter.m; path = Source/GPUImageWhiteBalanceFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867B71727791200912E34 /* GPUImageLevelsFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLevelsFilter.h; path = Source/GPUImageLevelsFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867B81727791200912E34 /* GPUImageLevelsFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLevelsFilter.m; path = Source/GPUImageLevelsFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867BB1727794000912E34 /* GPUImageMonochromeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMonochromeFilter.h; path = Source/GPUImageMonochromeFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867BC1727794000912E34 /* GPUImageMonochromeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMonochromeFilter.m; path = Source/GPUImageMonochromeFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867BD1727794000912E34 /* GPUImageRGBFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBFilter.h; path = Source/GPUImageRGBFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867BE1727794000912E34 /* GPUImageRGBFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBFilter.m; path = Source/GPUImageRGBFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867C51727872D00912E34 /* GPUImagePixellateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePixellateFilter.h; path = Source/GPUImagePixellateFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867C61727872D00912E34 /* GPUImagePixellateFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePixellateFilter.m; path = Source/GPUImagePixellateFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867C9172788F100912E34 /* GPUImage3x3TextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = 
GPUImage3x3TextureSamplingFilter.h; path = Source/GPUImage3x3TextureSamplingFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867CA172788F100912E34 /* GPUImage3x3TextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImage3x3TextureSamplingFilter.m; path = Source/GPUImage3x3TextureSamplingFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867CD172789B200912E34 /* GPUImageSketchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSketchFilter.h; path = Source/GPUImageSketchFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867CE172789B200912E34 /* GPUImageSketchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSketchFilter.m; path = Source/GPUImageSketchFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867D217278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSobelEdgeDetectionFilter.h; path = Source/GPUImageSobelEdgeDetectionFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867D317278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSobelEdgeDetectionFilter.m; path = Source/GPUImageSobelEdgeDetectionFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867D617278A7900912E34 /* GPUImageToonFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageToonFilter.h; path = Source/GPUImageToonFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867D717278A7900912E34 /* GPUImageToonFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageToonFilter.m; path = Source/GPUImageToonFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867DA17278C5800912E34 /* GPUImageTwoPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTwoPassFilter.h; path = Source/GPUImageTwoPassFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867DB17278C5900912E34 /* GPUImageTwoPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTwoPassFilter.m; path = Source/GPUImageTwoPassFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867DE17278D1900912E34 /* GPUImageGrayscaleFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGrayscaleFilter.h; path = Source/GPUImageGrayscaleFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867DF17278D1900912E34 /* GPUImageGrayscaleFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGrayscaleFilter.m; path = Source/GPUImageGrayscaleFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867E21727903C00912E34 /* GPUImageKuwaharaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageKuwaharaFilter.h; path = Source/GPUImageKuwaharaFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867E31727903D00912E34 /* GPUImageKuwaharaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageKuwaharaFilter.m; path = Source/GPUImageKuwaharaFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867E61728477200912E34 /* GPUImageFalseColorFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageFalseColorFilter.h; path = Source/GPUImageFalseColorFilter.h; sourceTree = 
SOURCE_ROOT; }; + BCF867E71728477200912E34 /* GPUImageFalseColorFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageFalseColorFilter.m; path = Source/GPUImageFalseColorFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867EA172847A000912E34 /* GPUImageSharpenFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSharpenFilter.h; path = Source/GPUImageSharpenFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867EB172847A000912E34 /* GPUImageSharpenFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSharpenFilter.m; path = Source/GPUImageSharpenFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867EE1728487000912E34 /* GPUImageUnsharpMaskFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageUnsharpMaskFilter.h; path = Source/GPUImageUnsharpMaskFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867EF1728487000912E34 /* GPUImageUnsharpMaskFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageUnsharpMaskFilter.m; path = Source/GPUImageUnsharpMaskFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867F21728490300912E34 /* GPUImageTwoInputFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTwoInputFilter.h; path = Source/GPUImageTwoInputFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867F31728490500912E34 /* GPUImageTwoInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTwoInputFilter.m; path = Source/GPUImageTwoInputFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867F61728494B00912E34 /* GPUImageGaussianBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGaussianBlurFilter.h; path = Source/GPUImageGaussianBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867F71728494B00912E34 /* GPUImageGaussianBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGaussianBlurFilter.m; path = Source/GPUImageGaussianBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867FA1728496A00912E34 /* GPUImageTwoPassTextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTwoPassTextureSamplingFilter.h; path = Source/GPUImageTwoPassTextureSamplingFilter.h; sourceTree = SOURCE_ROOT; }; + BCF867FB1728496C00912E34 /* GPUImageTwoPassTextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTwoPassTextureSamplingFilter.m; path = Source/GPUImageTwoPassTextureSamplingFilter.m; sourceTree = SOURCE_ROOT; }; + BCF867FE17284C3C00912E34 /* GPUImageFilterGroup.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageFilterGroup.h; path = Source/GPUImageFilterGroup.h; sourceTree = SOURCE_ROOT; }; + BCF867FF17284C3E00912E34 /* GPUImageFilterGroup.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageFilterGroup.m; path = Source/GPUImageFilterGroup.m; sourceTree = SOURCE_ROOT; }; + BCF8680217284CFB00912E34 /* GPUImageTransformFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTransformFilter.h; path = Source/GPUImageTransformFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8680317284CFB00912E34 /* 
GPUImageTransformFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTransformFilter.m; path = Source/GPUImageTransformFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8680617284D0A00912E34 /* GPUImageCropFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageCropFilter.h; path = Source/GPUImageCropFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8680717284D0A00912E34 /* GPUImageCropFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageCropFilter.m; path = Source/GPUImageCropFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8680C172853A500912E34 /* GPUImageGaussianBlurPositionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGaussianBlurPositionFilter.h; path = Source/GPUImageGaussianBlurPositionFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8680D172853A600912E34 /* GPUImageGaussianBlurPositionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGaussianBlurPositionFilter.m; path = Source/GPUImageGaussianBlurPositionFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8680E172853A700912E34 /* GPUImageGaussianSelectiveBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageGaussianSelectiveBlurFilter.h; path = Source/GPUImageGaussianSelectiveBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8680F172853A800912E34 /* GPUImageGaussianSelectiveBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageGaussianSelectiveBlurFilter.m; path = Source/GPUImageGaussianSelectiveBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86814172854AA00912E34 /* GPUImageBilateralFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBilateralFilter.h; path = Source/GPUImageBilateralFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86815172854AC00912E34 /* GPUImageBilateralFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBilateralFilter.m; path = Source/GPUImageBilateralFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86816172854AE00912E34 /* GPUImageBoxBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBoxBlurFilter.h; path = Source/GPUImageBoxBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86817172854B100912E34 /* GPUImageBoxBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBoxBlurFilter.m; path = Source/GPUImageBoxBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8681A172854B600912E34 /* GPUImageMedianFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMedianFilter.h; path = Source/GPUImageMedianFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8681B172854B800912E34 /* GPUImageMedianFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMedianFilter.m; path = Source/GPUImageMedianFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86824172857AD00912E34 /* GPUImageMotionBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMotionBlurFilter.h; path = Source/GPUImageMotionBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86825172857B000912E34 /* GPUImageMotionBlurFilter.m */ = {isa 
= PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMotionBlurFilter.m; path = Source/GPUImageMotionBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86826172857B200912E34 /* GPUImageZoomBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageZoomBlurFilter.h; path = Source/GPUImageZoomBlurFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86827172857B600912E34 /* GPUImageZoomBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageZoomBlurFilter.m; path = Source/GPUImageZoomBlurFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8682D17286EDB00912E34 /* GPUImageAddBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAddBlendFilter.h; path = Source/GPUImageAddBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8682E17286EDB00912E34 /* GPUImageAddBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAddBlendFilter.m; path = Source/GPUImageAddBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683317286F2800912E34 /* GPUImageColorBurnBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorBurnBlendFilter.h; path = Source/GPUImageColorBurnBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683417286F2800912E34 /* GPUImageColorBurnBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorBurnBlendFilter.m; path = Source/GPUImageColorBurnBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683517286F2800912E34 /* GPUImageDarkenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageDarkenBlendFilter.h; path = Source/GPUImageDarkenBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683617286F2800912E34 /* GPUImageDarkenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDarkenBlendFilter.m; path = Source/GPUImageDarkenBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683717286F2800912E34 /* GPUImageDivideBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageDivideBlendFilter.h; path = Source/GPUImageDivideBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683817286F2800912E34 /* GPUImageDivideBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDivideBlendFilter.m; path = Source/GPUImageDivideBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683917286F2800912E34 /* GPUImageLightenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLightenBlendFilter.h; path = Source/GPUImageLightenBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683A17286F2800912E34 /* GPUImageLightenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLightenBlendFilter.m; path = Source/GPUImageLightenBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683B17286F2800912E34 /* GPUImageMultiplyBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMultiplyBlendFilter.h; path = Source/GPUImageMultiplyBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683C17286F2800912E34 /* GPUImageMultiplyBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.c.objc; name = GPUImageMultiplyBlendFilter.m; path = Source/GPUImageMultiplyBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8683D17286F2800912E34 /* GPUImageOverlayBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageOverlayBlendFilter.h; path = Source/GPUImageOverlayBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8683E17286F2800912E34 /* GPUImageOverlayBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageOverlayBlendFilter.m; path = Source/GPUImageOverlayBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8684B1728715800912E34 /* GPUImageColorDodgeBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorDodgeBlendFilter.h; path = Source/GPUImageColorDodgeBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8684C1728715900912E34 /* GPUImageColorDodgeBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorDodgeBlendFilter.m; path = Source/GPUImageColorDodgeBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8684D1728715C00912E34 /* GPUImageDifferenceBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageDifferenceBlendFilter.h; path = Source/GPUImageDifferenceBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8684E1728715D00912E34 /* GPUImageDifferenceBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDifferenceBlendFilter.m; path = Source/GPUImageDifferenceBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8684F1728715F00912E34 /* GPUImageLinearBurnBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLinearBurnBlendFilter.h; path = Source/GPUImageLinearBurnBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868501728716000912E34 /* GPUImageLinearBurnBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLinearBurnBlendFilter.m; path = Source/GPUImageLinearBurnBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868511728716200912E34 /* GPUImageScreenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageScreenBlendFilter.h; path = Source/GPUImageScreenBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868521728716300912E34 /* GPUImageScreenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageScreenBlendFilter.m; path = Source/GPUImageScreenBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8685B172871EE00912E34 /* GPUImageColorBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorBlendFilter.h; path = Source/GPUImageColorBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8685C172871F000912E34 /* GPUImageColorBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorBlendFilter.m; path = Source/GPUImageColorBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8685D172871F300912E34 /* GPUImageExclusionBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageExclusionBlendFilter.h; path = Source/GPUImageExclusionBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8685E172871F500912E34 /* 
GPUImageExclusionBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageExclusionBlendFilter.m; path = Source/GPUImageExclusionBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8685F172871F800912E34 /* GPUImageHardLightBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHardLightBlendFilter.h; path = Source/GPUImageHardLightBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86860172871FB00912E34 /* GPUImageHardLightBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHardLightBlendFilter.m; path = Source/GPUImageHardLightBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86861172871FC00912E34 /* GPUImageHueBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHueBlendFilter.h; path = Source/GPUImageHueBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86862172871FD00912E34 /* GPUImageHueBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHueBlendFilter.m; path = Source/GPUImageHueBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86863172871FF00912E34 /* GPUImageLuminosityBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageLuminosityBlendFilter.h; path = Source/GPUImageLuminosityBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868641728720100912E34 /* GPUImageLuminosityBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageLuminosityBlendFilter.m; path = Source/GPUImageLuminosityBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868651728720200912E34 /* GPUImageNormalBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageNormalBlendFilter.h; path = Source/GPUImageNormalBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868661728720400912E34 /* GPUImageNormalBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageNormalBlendFilter.m; path = Source/GPUImageNormalBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868671728720600912E34 /* GPUImagePoissonBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePoissonBlendFilter.h; path = Source/GPUImagePoissonBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868681728720700912E34 /* GPUImagePoissonBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePoissonBlendFilter.m; path = Source/GPUImagePoissonBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868691728720900912E34 /* GPUImageSaturationBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSaturationBlendFilter.h; path = Source/GPUImageSaturationBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8686A1728720B00912E34 /* GPUImageSaturationBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSaturationBlendFilter.m; path = Source/GPUImageSaturationBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8686B1728720C00912E34 /* GPUImageSoftLightBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSoftLightBlendFilter.h; path = Source/GPUImageSoftLightBlendFilter.h; sourceTree = 
SOURCE_ROOT; }; + BCF8686C1728720E00912E34 /* GPUImageSoftLightBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSoftLightBlendFilter.m; path = Source/GPUImageSoftLightBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8686D1728720F00912E34 /* GPUImageSubtractBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSubtractBlendFilter.h; path = Source/GPUImageSubtractBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8686E1728721100912E34 /* GPUImageSubtractBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSubtractBlendFilter.m; path = Source/GPUImageSubtractBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868831728771D00912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTwoInputCrossTextureSamplingFilter.h; path = Source/GPUImageTwoInputCrossTextureSamplingFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868841728772600912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTwoInputCrossTextureSamplingFilter.m; path = Source/GPUImageTwoInputCrossTextureSamplingFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8688B17287EB500912E34 /* GPUImageChromaKeyBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageChromaKeyBlendFilter.h; path = Source/GPUImageChromaKeyBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8688C17287EBB00912E34 /* GPUImageChromaKeyBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageChromaKeyBlendFilter.m; path = Source/GPUImageChromaKeyBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8688D17287EC400912E34 /* GPUImageDissolveBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageDissolveBlendFilter.h; path = Source/GPUImageDissolveBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8688E17287ECC00912E34 /* GPUImageDissolveBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageDissolveBlendFilter.m; path = Source/GPUImageDissolveBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF86893172880AB00912E34 /* GPUImageSourceOverBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSourceOverBlendFilter.h; path = Source/GPUImageSourceOverBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF86894172880B100912E34 /* GPUImageSourceOverBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSourceOverBlendFilter.m; path = Source/GPUImageSourceOverBlendFilter.m; sourceTree = SOURCE_ROOT; }; + BCF868971728839800912E34 /* GPUImageMaskFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMaskFilter.h; path = Source/GPUImageMaskFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868981728839C00912E34 /* GPUImageMaskFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMaskFilter.m; path = Source/GPUImageMaskFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8689B1728861E00912E34 /* GPUImageOpacityFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.h; name = GPUImageOpacityFilter.h; path = Source/GPUImageOpacityFilter.h; sourceTree = SOURCE_ROOT; }; + BCF8689C1728861F00912E34 /* GPUImageOpacityFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageOpacityFilter.m; path = Source/GPUImageOpacityFilter.m; sourceTree = SOURCE_ROOT; }; + BCF8689F1728865500912E34 /* GPUImageAlphaBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageAlphaBlendFilter.h; path = Source/GPUImageAlphaBlendFilter.h; sourceTree = SOURCE_ROOT; }; + BCF868A01728865D00912E34 /* GPUImageAlphaBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageAlphaBlendFilter.m; path = Source/GPUImageAlphaBlendFilter.m; sourceTree = SOURCE_ROOT; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + BCA3F31817239B6500E28AEC /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + BCF8675D1725BBE900912E34 /* CoreMedia.framework in Frameworks */, + BCF8675B1725BBE300912E34 /* AVFoundation.framework in Frameworks */, + BCA3F3621723A8FA00E28AEC /* OpenGL.framework in Frameworks */, + BCA3F3631723A8FA00E28AEC /* QuartzCore.framework in Frameworks */, + BCA3F32017239B6500E28AEC /* Cocoa.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + BCA3F31217239B6500E28AEC = { + isa = PBXGroup; + children = ( + BCA3F32517239B6500E28AEC /* GPUImageMac */, + BCA3F31E17239B6500E28AEC /* Frameworks */, + BCA3F31D17239B6500E28AEC /* Products */, + ); + sourceTree = "<group>"; + }; + BCA3F31D17239B6500E28AEC /* Products */ = { + isa = PBXGroup; + children = ( + BCA3F31C17239B6500E28AEC /* GPUImage.framework */, + ); + name = Products; + sourceTree = "<group>"; + }; + BCA3F31E17239B6500E28AEC /* Frameworks */ = { + isa = PBXGroup; + children = ( + BCA3F31F17239B6500E28AEC /* Cocoa.framework */, + BCA3F3601723A8FA00E28AEC /* OpenGL.framework */, + BCA3F3611723A8FA00E28AEC /* QuartzCore.framework */, + BCF8675C1725BBE900912E34 /* CoreMedia.framework */, + BCF8675A1725BBE300912E34 /* AVFoundation.framework */, + BCA3F32117239B6500E28AEC /* Other Frameworks */, + ); + name = Frameworks; + sourceTree = "<group>"; + }; + BCA3F32117239B6500E28AEC /* Other Frameworks */ = { + isa = PBXGroup; + children = ( + BCA3F32217239B6500E28AEC /* AppKit.framework */, + BCA3F32317239B6500E28AEC /* CoreData.framework */, + BCA3F32417239B6500E28AEC /* Foundation.framework */, + ); + name = "Other Frameworks"; + sourceTree = "<group>"; + }; + BCA3F32517239B6500E28AEC /* GPUImageMac */ = { + isa = PBXGroup; + children = ( + BCA3F3641723A96600E28AEC /* GPUImage.h */, + BCF40F1417247FD8005AE36A /* GLProgram.h */, + BCF40F1517247FD8005AE36A /* GLProgram.m */, + BCF40F1017247D68005AE36A /* GPUImageContext.h */, + BCF40F1117247D68005AE36A /* GPUImageContext.m */, + BCF40F1817248286005AE36A /* Sources */, + BCF40F1D17248308005AE36A /* Filters */, + BCF40F2217248811005AE36A /* Outputs */, + BCA3F32617239B6500E28AEC /* Supporting Files */, + ); + path = GPUImageMac; + sourceTree = "<group>"; + }; + BCA3F32617239B6500E28AEC /* Supporting Files */ = { + isa = PBXGroup; + children = ( + BCA3F32717239B6500E28AEC /* GPUImageMac-Info.plist */, + BCA3F32B17239B6500E28AEC /* GPUImageMac-Prefix.pch */, + ); + name = "Supporting Files"; + sourceTree = "<group>"; + }; + BCF40F1817248286005AE36A /* Sources */ = { + isa =
PBXGroup; + children = ( + A87E5E1417764B16007FD5B1 /* GPUImageRawDataInput.h */, + A87E5E1517764B16007FD5B1 /* GPUImageRawDataInput.m */, + BCF40F191724829B005AE36A /* GPUImageOutput.h */, + BCF40F1A1724829B005AE36A /* GPUImageOutput.m */, + BCF8679B1727585D00912E34 /* GPUImageAVCamera.h */, + BCF8679C1727585D00912E34 /* GPUImageAVCamera.m */, + BCF867541725AF9300912E34 /* GPUImagePicture.h */, + BCF867551725AF9300912E34 /* GPUImagePicture.m */, + ); + name = Sources; + sourceTree = "<group>"; + }; + BCF40F1D17248308005AE36A /* Filters */ = { + isa = PBXGroup; + children = ( + BCF40F1E17248322005AE36A /* GPUImageFilter.h */, + BCF40F1F17248322005AE36A /* GPUImageFilter.m */, + BCF867FE17284C3C00912E34 /* GPUImageFilterGroup.h */, + BCF867FF17284C3E00912E34 /* GPUImageFilterGroup.m */, + BCF867DA17278C5800912E34 /* GPUImageTwoPassFilter.h */, + BCF867DB17278C5900912E34 /* GPUImageTwoPassFilter.m */, + BCF867FA1728496A00912E34 /* GPUImageTwoPassTextureSamplingFilter.h */, + BCF867FB1728496C00912E34 /* GPUImageTwoPassTextureSamplingFilter.m */, + BCF868831728771D00912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.h */, + BCF868841728772600912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.m */, + BCF867F21728490300912E34 /* GPUImageTwoInputFilter.h */, + BCF867F31728490500912E34 /* GPUImageTwoInputFilter.m */, + BCAE41E21731C3000020F80D /* GPUImageThreeInputFilter.h */, + BCAE41E31731C3000020F80D /* GPUImageThreeInputFilter.m */, + BCF867C9172788F100912E34 /* GPUImage3x3TextureSamplingFilter.h */, + BCF867CA172788F100912E34 /* GPUImage3x3TextureSamplingFilter.m */, + BCF867C31727820100912E34 /* Color processing */, + BCF867D1172789C800912E34 /* Image processing */, + BCF867C4172786BA00912E34 /* Effects */, + BCF8682C17286E5F00912E34 /* Blends */, + ); + name = Filters; + sourceTree = "<group>"; + }; + BCF40F2217248811005AE36A /* Outputs */ = { + isa = PBXGroup; + children = ( + A87E5E10177648F3007FD5B1 /* GPUImageRawDataOutput.h */, + A87E5E11177648F3007FD5B1 /* GPUImageRawDataOutput.m */, + BCF40F2317248836005AE36A /* GPUImageView.h */, + BCF40F2417248836005AE36A /* GPUImageView.m */, + BCF867521725AF9300912E34 /* GPUImageMovieWriter.h */, + BCF867531725AF9300912E34 /* GPUImageMovieWriter.m */, + ); + name = Outputs; + sourceTree = "<group>"; + }; + BCF867C31727820100912E34 /* Color processing */ = { + isa = PBXGroup; + children = ( + BCF867A9172778D500912E34 /* GPUImageSaturationFilter.h */, + BCF867AA172778D500912E34 /* GPUImageSaturationFilter.m */, + BCF867A3172778D500912E34 /* GPUImageContrastFilter.h */, + BCF867A4172778D500912E34 /* GPUImageContrastFilter.m */, + BCF40F27172488D3005AE36A /* GPUImageBrightnessFilter.h */, + BCF40F28172488D3005AE36A /* GPUImageBrightnessFilter.m */, + BCF867B71727791200912E34 /* GPUImageLevelsFilter.h */, + BCF867B81727791200912E34 /* GPUImageLevelsFilter.m */, + BCF867A5172778D500912E34 /* GPUImageExposureFilter.h */, + BCF867A6172778D500912E34 /* GPUImageExposureFilter.m */, + BCF867BD1727794000912E34 /* GPUImageRGBFilter.h */, + BCF867BE1727794000912E34 /* GPUImageRGBFilter.m */, + BCF867A7172778D500912E34 /* GPUImageHueFilter.h */, + BCF867A8172778D500912E34 /* GPUImageHueFilter.m */, + BCF867AB172778D500912E34 /* GPUImageWhiteBalanceFilter.h */, + BCF867AC172778D500912E34 /* GPUImageWhiteBalanceFilter.m */, + BCF867BB1727794000912E34 /* GPUImageMonochromeFilter.h */, + BCF867BC1727794000912E34 /* GPUImageMonochromeFilter.m */, + BCF867DE17278D1900912E34 /* GPUImageGrayscaleFilter.h */, + BCF867DF17278D1900912E34 /* GPUImageGrayscaleFilter.m */, +
BCF867E61728477200912E34 /* GPUImageFalseColorFilter.h */, + BCF867E71728477200912E34 /* GPUImageFalseColorFilter.m */, + BCB009E4172A1BBD00DB804C /* GPUImageGammaFilter.h */, + BCB009E5172A1BBD00DB804C /* GPUImageGammaFilter.m */, + BCB009E8172A1BE800DB804C /* GPUImageToneCurveFilter.h */, + BCB009E9172A1BE800DB804C /* GPUImageToneCurveFilter.m */, + BCB009EE172A23AA00DB804C /* GPUImageHighlightShadowFilter.h */, + BCB009EF172A23AA00DB804C /* GPUImageHighlightShadowFilter.m */, + BCB009EC172A23A900DB804C /* GPUImageHazeFilter.h */, + BCB009ED172A23A900DB804C /* GPUImageHazeFilter.m */, + BCB009F8172A246D00DB804C /* GPUImageColorMatrixFilter.h */, + BCB009F9172A246E00DB804C /* GPUImageColorMatrixFilter.m */, + BCB009F4172A243300DB804C /* GPUImageSepiaFilter.h */, + BCB009F5172A243400DB804C /* GPUImageSepiaFilter.m */, + BC78B708172DCCB800342C6A /* GPUImageLookupFilter.h */, + BC78B709172DCCB800342C6A /* GPUImageLookupFilter.m */, + BC78B706172DCCB800342C6A /* GPUImageAmatorkaFilter.h */, + BC78B707172DCCB800342C6A /* GPUImageAmatorkaFilter.m */, + BC78B70A172DCCB800342C6A /* GPUImageMissEtikateFilter.h */, + BC78B70B172DCCB800342C6A /* GPUImageMissEtikateFilter.m */, + BC78B70C172DCCB800342C6A /* GPUImageSoftEleganceFilter.h */, + BC78B70D172DCCB800342C6A /* GPUImageSoftEleganceFilter.m */, + BCC0DF4017358315007C485F /* GPUImageColorInvertFilter.h */, + BCC0DF4117358315007C485F /* GPUImageColorInvertFilter.m */, + BCC0DF4417359529007C485F /* GPUImageHistogramFilter.h */, + BCC0DF4517359529007C485F /* GPUImageHistogramFilter.m */, + BCC0DF4617359529007C485F /* GPUImageHistogramGenerator.h */, + BCC0DF4717359529007C485F /* GPUImageHistogramGenerator.m */, + BCC0DF4C173595D3007C485F /* GPUImageAverageColor.h */, + BCC0DF4D173595D3007C485F /* GPUImageAverageColor.m */, + BCC0DF4E173595D3007C485F /* GPUImageLuminosity.h */, + BCC0DF4F173595D3007C485F /* GPUImageLuminosity.m */, + BCC0DF50173595D3007C485F /* GPUImageSolidColorGenerator.h */, + BCC0DF51173595D3007C485F /* GPUImageSolidColorGenerator.m */, + BCC0DF58173596A8007C485F /* GPUImageAdaptiveThresholdFilter.h */, + BCC0DF59173596A8007C485F /* GPUImageAdaptiveThresholdFilter.m */, + BCC0DF6017359F42007C485F /* GPUImageLuminanceThresholdFilter.h */, + BCC0DF6117359F42007C485F /* GPUImageLuminanceThresholdFilter.m */, + BCC0DF5C173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.h */, + BCC0DF5D173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.m */, + BCE817A01735FDC60071D084 /* GPUImageHalftoneFilter.h */, + BCE817A11735FDC60071D084 /* GPUImageHalftoneFilter.m */, + BCE817A21735FDC60071D084 /* GPUImagePixellatePositionFilter.h */, + BCE817A31735FDC60071D084 /* GPUImagePixellatePositionFilter.m */, + BCE817A41735FDC70071D084 /* GPUImagePolarPixellateFilter.h */, + BCE817A51735FDC70071D084 /* GPUImagePolarPixellateFilter.m */, + BCE817A61735FDC70071D084 /* GPUImagePolkaDotFilter.h */, + BCE817A71735FDC70071D084 /* GPUImagePolkaDotFilter.m */, + BCE817B01735FE150071D084 /* GPUImageCrosshatchFilter.h */, + BCE817B11735FE150071D084 /* GPUImageCrosshatchFilter.m */, + BCF8689B1728861E00912E34 /* GPUImageOpacityFilter.h */, + BCF8689C1728861F00912E34 /* GPUImageOpacityFilter.m */, + BCA20B6B1746D44A0097C84A /* GPUImageCGAColorspaceFilter.h */, + BCA20B6C1746D44A0097C84A /* GPUImageCGAColorspaceFilter.m */, + ); + name = "Color processing"; + sourceTree = "<group>"; + }; + BCF867C4172786BA00912E34 /* Effects */ = { + isa = PBXGroup; + children = ( + BCF867C51727872D00912E34 /* GPUImagePixellateFilter.h */, + BCF867C61727872D00912E34 /*
GPUImagePixellateFilter.m */, + BCF867CD172789B200912E34 /* GPUImageSketchFilter.h */, + BCF867CE172789B200912E34 /* GPUImageSketchFilter.m */, + BCA20B5F1746CFE60097C84A /* GPUImageThresholdSketchFilter.h */, + BCA20B601746CFE60097C84A /* GPUImageThresholdSketchFilter.m */, + BCF867D617278A7900912E34 /* GPUImageToonFilter.h */, + BCF867D717278A7900912E34 /* GPUImageToonFilter.m */, + BCA20B631746D3C30097C84A /* GPUImageSmoothToonFilter.h */, + BCA20B641746D3C30097C84A /* GPUImageSmoothToonFilter.m */, + BCA20B671746D3DC0097C84A /* GPUImageTiltShiftFilter.h */, + BCA20B681746D3DC0097C84A /* GPUImageTiltShiftFilter.m */, + BCBF69331730623200E5792A /* GPUImageEmbossFilter.h */, + BCBF69341730623200E5792A /* GPUImageEmbossFilter.m */, + BCA20B771746DEDD0097C84A /* GPUImageChromaKeyFilter.h */, + BCA20B781746DEDE0097C84A /* GPUImageChromaKeyFilter.m */, + BCF867E21727903C00912E34 /* GPUImageKuwaharaFilter.h */, + BCF867E31727903D00912E34 /* GPUImageKuwaharaFilter.m */, + BCA20B731746DE850097C84A /* GPUImageKuwaharaRadius3Filter.h */, + BCA20B741746DE850097C84A /* GPUImageKuwaharaRadius3Filter.m */, + BCA20B6F1746D4AC0097C84A /* GPUImagePosterizeFilter.h */, + BCA20B701746D4AC0097C84A /* GPUImagePosterizeFilter.m */, + BCA20B7B1746DF200097C84A /* GPUImageVignetteFilter.h */, + BCA20B7C1746DF200097C84A /* GPUImageVignetteFilter.m */, + BCA20B971747081C0097C84A /* GPUImageSwirlFilter.h */, + BCA20B981747081D0097C84A /* GPUImageSwirlFilter.m */, + BCA20B7F1747080E0097C84A /* GPUImageBulgeDistortionFilter.h */, + BCA20B801747080F0097C84A /* GPUImageBulgeDistortionFilter.m */, + BCA20B89174708150097C84A /* GPUImagePinchDistortionFilter.h */, + BCA20B8A174708150097C84A /* GPUImagePinchDistortionFilter.m */, + BCA20B951747081C0097C84A /* GPUImageStretchDistortionFilter.h */, + BCA20B961747081C0097C84A /* GPUImageStretchDistortionFilter.m */, + BCA20B811747080F0097C84A /* GPUImageClosingFilter.h */, + BCA20B82174708100097C84A /* GPUImageClosingFilter.m */, + BCA20B8B174708160097C84A /* GPUImageRGBClosingFilter.h */, + BCA20B8C174708160097C84A /* GPUImageRGBClosingFilter.m */, + BCA20B83174708110097C84A /* GPUImageDilationFilter.h */, + BCA20B84174708120097C84A /* GPUImageDilationFilter.m */, + BCA20B8D174708190097C84A /* GPUImageRGBDilationFilter.h */, + BCA20B8E174708190097C84A /* GPUImageRGBDilationFilter.m */, + BCA20B85174708120097C84A /* GPUImageErosionFilter.h */, + BCA20B86174708130097C84A /* GPUImageErosionFilter.m */, + BCA20B8F174708190097C84A /* GPUImageRGBErosionFilter.h */, + BCA20B901747081A0097C84A /* GPUImageRGBErosionFilter.m */, + BCA20BB3174708610097C84A /* GPUImageOpeningFilter.h */, + BCA20BB4174708620097C84A /* GPUImageOpeningFilter.m */, + BCA20B911747081A0097C84A /* GPUImageRGBOpeningFilter.h */, + BCA20B921747081B0097C84A /* GPUImageRGBOpeningFilter.m */, + BCA20B931747081B0097C84A /* GPUImageSphereRefractionFilter.h */, + BCA20B941747081B0097C84A /* GPUImageSphereRefractionFilter.m */, + BCA20B87174708140097C84A /* GPUImageGlassSphereFilter.h */, + BCA20B88174708150097C84A /* GPUImageGlassSphereFilter.m */, + BCA20BBB17471C400097C84A /* GPUImageMosaicFilter.h */, + BCA20BBC17471C420097C84A /* GPUImageMosaicFilter.m */, + BCA20BBD17471C440097C84A /* GPUImagePerlinNoiseFilter.h */, + BCA20BBE17471C460097C84A /* GPUImagePerlinNoiseFilter.m */, + BCA20BB717471C3A0097C84A /* GPUImageJFAVoronoiFilter.h */, + BCA20BB817471C3C0097C84A /* GPUImageJFAVoronoiFilter.m */, + BCA20BBF17471C480097C84A /* GPUImageVoronoiConsumerFilter.h */, + BCA20BC017471C490097C84A /* 
GPUImageVoronoiConsumerFilter.m */, + ); + name = Effects; + sourceTree = "<group>"; + }; + BCF867D1172789C800912E34 /* Image processing */ = { + isa = PBXGroup; + children = ( + BCF8680217284CFB00912E34 /* GPUImageTransformFilter.h */, + BCF8680317284CFB00912E34 /* GPUImageTransformFilter.m */, + BCF8680617284D0A00912E34 /* GPUImageCropFilter.h */, + BCF8680717284D0A00912E34 /* GPUImageCropFilter.m */, + BCBF692F173061D200E5792A /* GPUImage3x3ConvolutionFilter.h */, + BCBF6930173061D200E5792A /* GPUImage3x3ConvolutionFilter.m */, + BC6C553A1730636600EB222D /* GPUImageLaplacianFilter.h */, + BC6C553B1730636600EB222D /* GPUImageLaplacianFilter.m */, + BCF867D217278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.h */, + BCF867D317278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.m */, + BCE817B6173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.h */, + BCE817B7173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.m */, + BCE817B4173608690071D084 /* GPUImageCannyEdgeDetectionFilter.h */, + BCE817B5173608690071D084 /* GPUImageCannyEdgeDetectionFilter.m */, + BCE817D017360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.h */, + BCE817D117360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.m */, + BCE817C817360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.h */, + BCE817C917360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */, + BCE817CA17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.h */, + BCE817CB17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.m */, + BCE817B8173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.h */, + BCE817B9173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.m */, + BCE817C01736092A0071D084 /* GPUImageXYDerivativeFilter.h */, + BCE817C11736092A0071D084 /* GPUImageXYDerivativeFilter.m */, + BCE2EF8217459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.h */, + BCE2EF8317459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.m */, + BCE2EF8417459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.h */, + BCE2EF8517459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.m */, + BCE2EF8617459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.h */, + BCE2EF8717459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.m */, + BC569576174683620081491B /* GPUImageCrosshairGenerator.h */, + BC569577174683620081491B /* GPUImageCrosshairGenerator.m */, + BCE2EF8E17459D9300C0628C /* GPUImageColorPackingFilter.h */, + BCE2EF8F17459D9300C0628C /* GPUImageColorPackingFilter.m */, + BC96A4EF176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.h */, + BC96A4F0176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.m */, + BCE2EF9017459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.h */, + BCE2EF9117459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.m */, + BCE2EF9617459E2400C0628C /* GPUImageHoughTransformLineDetector.h */, + BCE2EF9717459E2400C0628C /* GPUImageHoughTransformLineDetector.m */, + BC569578174683620081491B /* GPUImageLineGenerator.h */, + BC569579174683620081491B /* GPUImageLineGenerator.m */, + BCE2EF9A17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.h */, + BCE2EF9B17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.m */, + BCA20B4F1746C9EC0097C84A /* GPUImageBuffer.h */, + BCA20B501746C9EC0097C84A /* GPUImageBuffer.m */, + BCA20B511746C9EC0097C84A /* GPUImageHighPassFilter.h */, + BCA20B521746C9EC0097C84A /* GPUImageHighPassFilter.m */, + BCA20B531746C9EC0097C84A /* GPUImageLowPassFilter.h */, + BCA20B541746C9EC0097C84A /*
GPUImageLowPassFilter.m */, + BCA20B551746C9EC0097C84A /* GPUImageMotionDetector.h */, + BCA20B561746C9EC0097C84A /* GPUImageMotionDetector.m */, + BCF86816172854AE00912E34 /* GPUImageBoxBlurFilter.h */, + BCF86817172854B100912E34 /* GPUImageBoxBlurFilter.m */, + BCF867F61728494B00912E34 /* GPUImageGaussianBlurFilter.h */, + BCF867F71728494B00912E34 /* GPUImageGaussianBlurFilter.m */, + BC8A583018124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.h */, + BC8A583118124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.m */, + BCF8680E172853A700912E34 /* GPUImageGaussianSelectiveBlurFilter.h */, + BCF8680F172853A800912E34 /* GPUImageGaussianSelectiveBlurFilter.m */, + BCF8680C172853A500912E34 /* GPUImageGaussianBlurPositionFilter.h */, + BCF8680D172853A600912E34 /* GPUImageGaussianBlurPositionFilter.m */, + BCF8681A172854B600912E34 /* GPUImageMedianFilter.h */, + BCF8681B172854B800912E34 /* GPUImageMedianFilter.m */, + BCF86814172854AA00912E34 /* GPUImageBilateralFilter.h */, + BCF86815172854AC00912E34 /* GPUImageBilateralFilter.m */, + BCF867EA172847A000912E34 /* GPUImageSharpenFilter.h */, + BCF867EB172847A000912E34 /* GPUImageSharpenFilter.m */, + BCF867EE1728487000912E34 /* GPUImageUnsharpMaskFilter.h */, + BCF867EF1728487000912E34 /* GPUImageUnsharpMaskFilter.m */, + BCF86824172857AD00912E34 /* GPUImageMotionBlurFilter.h */, + BCF86825172857B000912E34 /* GPUImageMotionBlurFilter.m */, + BCF86826172857B200912E34 /* GPUImageZoomBlurFilter.h */, + BCF86827172857B600912E34 /* GPUImageZoomBlurFilter.m */, + BC6C55441730BDCF00EB222D /* GPUImageLanczosResamplingFilter.h */, + BC6C55451730BDCF00EB222D /* GPUImageLanczosResamplingFilter.m */, + BCA20BB917471C3D0097C84A /* GPUImageLocalBinaryPatternFilter.h */, + BCA20BBA17471C3F0097C84A /* GPUImageLocalBinaryPatternFilter.m */, + ); + name = "Image processing"; + sourceTree = "<group>"; + }; + BCF8682C17286E5F00912E34 /* Blends */ = { + isa = PBXGroup; + children = ( + BCF868971728839800912E34 /* GPUImageMaskFilter.h */, + BCF868981728839C00912E34 /* GPUImageMaskFilter.m */, + BCF8688D17287EC400912E34 /* GPUImageDissolveBlendFilter.h */, + BCF8688E17287ECC00912E34 /* GPUImageDissolveBlendFilter.m */, + BCF8688B17287EB500912E34 /* GPUImageChromaKeyBlendFilter.h */, + BCF8688C17287EBB00912E34 /* GPUImageChromaKeyBlendFilter.m */, + BCF8682D17286EDB00912E34 /* GPUImageAddBlendFilter.h */, + BCF8682E17286EDB00912E34 /* GPUImageAddBlendFilter.m */, + BCF8683717286F2800912E34 /* GPUImageDivideBlendFilter.h */, + BCF8683817286F2800912E34 /* GPUImageDivideBlendFilter.m */, + BCF8683B17286F2800912E34 /* GPUImageMultiplyBlendFilter.h */, + BCF8683C17286F2800912E34 /* GPUImageMultiplyBlendFilter.m */, + BCF8683D17286F2800912E34 /* GPUImageOverlayBlendFilter.h */, + BCF8683E17286F2800912E34 /* GPUImageOverlayBlendFilter.m */, + BCF8683917286F2800912E34 /* GPUImageLightenBlendFilter.h */, + BCF8683A17286F2800912E34 /* GPUImageLightenBlendFilter.m */, + BCF8683517286F2800912E34 /* GPUImageDarkenBlendFilter.h */, + BCF8683617286F2800912E34 /* GPUImageDarkenBlendFilter.m */, + BCF8683317286F2800912E34 /* GPUImageColorBurnBlendFilter.h */, + BCF8683417286F2800912E34 /* GPUImageColorBurnBlendFilter.m */, + BCF8684B1728715800912E34 /* GPUImageColorDodgeBlendFilter.h */, + BCF8684C1728715900912E34 /* GPUImageColorDodgeBlendFilter.m */, + BCF8684F1728715F00912E34 /* GPUImageLinearBurnBlendFilter.h */, + BCF868501728716000912E34 /* GPUImageLinearBurnBlendFilter.m */, + BCF868511728716200912E34 /* GPUImageScreenBlendFilter.h */, + BCF868521728716300912E34 /*
GPUImageScreenBlendFilter.m */, + BCF8684D1728715C00912E34 /* GPUImageDifferenceBlendFilter.h */, + BCF8684E1728715D00912E34 /* GPUImageDifferenceBlendFilter.m */, + BCF8686D1728720F00912E34 /* GPUImageSubtractBlendFilter.h */, + BCF8686E1728721100912E34 /* GPUImageSubtractBlendFilter.m */, + BCF8685D172871F300912E34 /* GPUImageExclusionBlendFilter.h */, + BCF8685E172871F500912E34 /* GPUImageExclusionBlendFilter.m */, + BCF8685F172871F800912E34 /* GPUImageHardLightBlendFilter.h */, + BCF86860172871FB00912E34 /* GPUImageHardLightBlendFilter.m */, + BCF8686B1728720C00912E34 /* GPUImageSoftLightBlendFilter.h */, + BCF8686C1728720E00912E34 /* GPUImageSoftLightBlendFilter.m */, + BCF8685B172871EE00912E34 /* GPUImageColorBlendFilter.h */, + BCF8685C172871F000912E34 /* GPUImageColorBlendFilter.m */, + BCF86861172871FC00912E34 /* GPUImageHueBlendFilter.h */, + BCF86862172871FD00912E34 /* GPUImageHueBlendFilter.m */, + BCF868691728720900912E34 /* GPUImageSaturationBlendFilter.h */, + BCF8686A1728720B00912E34 /* GPUImageSaturationBlendFilter.m */, + BCF86863172871FF00912E34 /* GPUImageLuminosityBlendFilter.h */, + BCF868641728720100912E34 /* GPUImageLuminosityBlendFilter.m */, + BCF868651728720200912E34 /* GPUImageNormalBlendFilter.h */, + BCF868661728720400912E34 /* GPUImageNormalBlendFilter.m */, + BCF868671728720600912E34 /* GPUImagePoissonBlendFilter.h */, + BCF868681728720700912E34 /* GPUImagePoissonBlendFilter.m */, + BCF86893172880AB00912E34 /* GPUImageSourceOverBlendFilter.h */, + BCF86894172880B100912E34 /* GPUImageSourceOverBlendFilter.m */, + BCF8689F1728865500912E34 /* GPUImageAlphaBlendFilter.h */, + BCF868A01728865D00912E34 /* GPUImageAlphaBlendFilter.m */, + ); + name = Blends; + sourceTree = "<group>"; + }; +/* End PBXGroup section */ + +/* Begin PBXHeadersBuildPhase section */ + BCA3F31917239B6500E28AEC /* Headers */ = { + isa = PBXHeadersBuildPhase; + buildActionMask = 2147483647; + files = ( + BCA3F3651723A96600E28AEC /* GPUImage.h in Headers */, + A87E5E1617764B16007FD5B1 /* GPUImageRawDataInput.h in Headers */, + A87E5E12177648F3007FD5B1 /* GPUImageRawDataOutput.h in Headers */, + BCF40F1217247D68005AE36A /* GPUImageContext.h in Headers */, + BCF40F1617247FD8005AE36A /* GLProgram.h in Headers */, + BCF40F1B1724829B005AE36A /* GPUImageOutput.h in Headers */, + BCF40F2017248322005AE36A /* GPUImageFilter.h in Headers */, + BCF40F2517248836005AE36A /* GPUImageView.h in Headers */, + BCF867561725AF9300912E34 /* GPUImageMovieWriter.h in Headers */, + BCF867581725AF9300912E34 /* GPUImagePicture.h in Headers */, + BCF8679D1727585D00912E34 /* GPUImageAVCamera.h in Headers */, + BCF40F29172488D3005AE36A /* GPUImageBrightnessFilter.h in Headers */, + BCF867AD172778D500912E34 /* GPUImageContrastFilter.h in Headers */, + BCF867AF172778D500912E34 /* GPUImageExposureFilter.h in Headers */, + BCF867B1172778D500912E34 /* GPUImageHueFilter.h in Headers */, + BCF867B3172778D500912E34 /* GPUImageSaturationFilter.h in Headers */, + BCF867B5172778D500912E34 /* GPUImageWhiteBalanceFilter.h in Headers */, + BCF867B91727791200912E34 /* GPUImageLevelsFilter.h in Headers */, + BCF867BF1727794000912E34 /* GPUImageMonochromeFilter.h in Headers */, + BCF867C11727794000912E34 /* GPUImageRGBFilter.h in Headers */, + BCF867C71727872D00912E34 /* GPUImagePixellateFilter.h in Headers */, + BCF867CB172788F100912E34 /* GPUImage3x3TextureSamplingFilter.h in Headers */, + BCF867CF172789B200912E34 /* GPUImageSketchFilter.h in Headers */, + BCF867D417278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.h in Headers */, +
BCF867D817278A7A00912E34 /* GPUImageToonFilter.h in Headers */, + BCF867DC17278C5A00912E34 /* GPUImageTwoPassFilter.h in Headers */, + BCF867E017278D1900912E34 /* GPUImageGrayscaleFilter.h in Headers */, + BCF867E41727903D00912E34 /* GPUImageKuwaharaFilter.h in Headers */, + BCF867E81728477300912E34 /* GPUImageFalseColorFilter.h in Headers */, + BCF867EC172847A000912E34 /* GPUImageSharpenFilter.h in Headers */, + BCF867F01728487100912E34 /* GPUImageUnsharpMaskFilter.h in Headers */, + BCF867F41728490600912E34 /* GPUImageTwoInputFilter.h in Headers */, + BCF867F81728494C00912E34 /* GPUImageGaussianBlurFilter.h in Headers */, + BCF867FC1728496E00912E34 /* GPUImageTwoPassTextureSamplingFilter.h in Headers */, + BCF8680017284C4000912E34 /* GPUImageFilterGroup.h in Headers */, + BCF8680417284CFC00912E34 /* GPUImageTransformFilter.h in Headers */, + BCF8680817284D0B00912E34 /* GPUImageCropFilter.h in Headers */, + BCF86810172853AA00912E34 /* GPUImageGaussianBlurPositionFilter.h in Headers */, + BCF86812172853AA00912E34 /* GPUImageGaussianSelectiveBlurFilter.h in Headers */, + BCF8681C172854B900912E34 /* GPUImageBilateralFilter.h in Headers */, + BCF8681E172854B900912E34 /* GPUImageBoxBlurFilter.h in Headers */, + BCF86822172854B900912E34 /* GPUImageMedianFilter.h in Headers */, + BCF86828172857B900912E34 /* GPUImageMotionBlurFilter.h in Headers */, + BCF8682A172857B900912E34 /* GPUImageZoomBlurFilter.h in Headers */, + BCF8683017286EDB00912E34 /* GPUImageAddBlendFilter.h in Headers */, + BCF8683F17286F2800912E34 /* GPUImageColorBurnBlendFilter.h in Headers */, + BCF8684117286F2800912E34 /* GPUImageDarkenBlendFilter.h in Headers */, + BCF8684317286F2800912E34 /* GPUImageDivideBlendFilter.h in Headers */, + BCF8684517286F2800912E34 /* GPUImageLightenBlendFilter.h in Headers */, + BCF8684717286F2800912E34 /* GPUImageMultiplyBlendFilter.h in Headers */, + BCF8684917286F2800912E34 /* GPUImageOverlayBlendFilter.h in Headers */, + BCF868531728716400912E34 /* GPUImageColorDodgeBlendFilter.h in Headers */, + BCF868551728716400912E34 /* GPUImageDifferenceBlendFilter.h in Headers */, + BCF868571728716400912E34 /* GPUImageLinearBurnBlendFilter.h in Headers */, + BCF868591728716400912E34 /* GPUImageScreenBlendFilter.h in Headers */, + BCF8686F1728721300912E34 /* GPUImageColorBlendFilter.h in Headers */, + BCF868711728721300912E34 /* GPUImageExclusionBlendFilter.h in Headers */, + BCF868731728721300912E34 /* GPUImageHardLightBlendFilter.h in Headers */, + BCF868751728721300912E34 /* GPUImageHueBlendFilter.h in Headers */, + BCF868771728721300912E34 /* GPUImageLuminosityBlendFilter.h in Headers */, + BCF868791728721300912E34 /* GPUImageNormalBlendFilter.h in Headers */, + BCF8687B1728721300912E34 /* GPUImagePoissonBlendFilter.h in Headers */, + BCF8687D1728721300912E34 /* GPUImageSaturationBlendFilter.h in Headers */, + BCF8687F1728721300912E34 /* GPUImageSoftLightBlendFilter.h in Headers */, + BCF868811728721300912E34 /* GPUImageSubtractBlendFilter.h in Headers */, + BCF868851728773900912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.h in Headers */, + BCF8688F17287ED000912E34 /* GPUImageChromaKeyBlendFilter.h in Headers */, + BCF8689117287ED000912E34 /* GPUImageDissolveBlendFilter.h in Headers */, + BCF86895172880B800912E34 /* GPUImageSourceOverBlendFilter.h in Headers */, + BCF86899172883A300912E34 /* GPUImageMaskFilter.h in Headers */, + BCF8689D1728862100912E34 /* GPUImageOpacityFilter.h in Headers */, + BCF868A11728866400912E34 /* GPUImageAlphaBlendFilter.h in Headers */, + BCB009E6172A1BBD00DB804C /* 
GPUImageGammaFilter.h in Headers */, + BCB009EA172A1BE800DB804C /* GPUImageToneCurveFilter.h in Headers */, + BCB009F0172A23AA00DB804C /* GPUImageHazeFilter.h in Headers */, + BCB009F2172A23AA00DB804C /* GPUImageHighlightShadowFilter.h in Headers */, + BCB009F6172A243400DB804C /* GPUImageSepiaFilter.h in Headers */, + BCB009FA172A246E00DB804C /* GPUImageColorMatrixFilter.h in Headers */, + BC78B710172DCCB800342C6A /* GPUImageLookupFilter.h in Headers */, + BC78B70E172DCCB800342C6A /* GPUImageAmatorkaFilter.h in Headers */, + BC78B712172DCCB800342C6A /* GPUImageMissEtikateFilter.h in Headers */, + BC78B714172DCCB800342C6A /* GPUImageSoftEleganceFilter.h in Headers */, + BCBF6931173061D200E5792A /* GPUImage3x3ConvolutionFilter.h in Headers */, + BCBF69351730623200E5792A /* GPUImageEmbossFilter.h in Headers */, + BC6C553C1730636600EB222D /* GPUImageLaplacianFilter.h in Headers */, + BC6C55461730BDCF00EB222D /* GPUImageLanczosResamplingFilter.h in Headers */, + BCAE41E41731C3000020F80D /* GPUImageThreeInputFilter.h in Headers */, + BCC0DF4217358315007C485F /* GPUImageColorInvertFilter.h in Headers */, + BCC0DF4817359529007C485F /* GPUImageHistogramFilter.h in Headers */, + BCC0DF4A17359529007C485F /* GPUImageHistogramGenerator.h in Headers */, + BCC0DF52173595D3007C485F /* GPUImageAverageColor.h in Headers */, + BCC0DF54173595D3007C485F /* GPUImageLuminosity.h in Headers */, + BCC0DF56173595D3007C485F /* GPUImageSolidColorGenerator.h in Headers */, + BCC0DF5A173596A8007C485F /* GPUImageAdaptiveThresholdFilter.h in Headers */, + BCC0DF5E173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.h in Headers */, + BCC0DF6217359F42007C485F /* GPUImageLuminanceThresholdFilter.h in Headers */, + BCE817A81735FDC70071D084 /* GPUImageHalftoneFilter.h in Headers */, + BCE817AA1735FDC70071D084 /* GPUImagePixellatePositionFilter.h in Headers */, + BCE817AC1735FDC70071D084 /* GPUImagePolarPixellateFilter.h in Headers */, + BCE817AE1735FDC70071D084 /* GPUImagePolkaDotFilter.h in Headers */, + BCE817B21735FE150071D084 /* GPUImageCrosshatchFilter.h in Headers */, + BCE817BA173608690071D084 /* GPUImageCannyEdgeDetectionFilter.h in Headers */, + BCE817BC173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.h in Headers */, + BCE817BE173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.h in Headers */, + BCE817C21736092A0071D084 /* GPUImageXYDerivativeFilter.h in Headers */, + BCE817CC17360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.h in Headers */, + BCE817CE17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.h in Headers */, + BCE817D217360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.h in Headers */, + BCE2EF8817459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.h in Headers */, + BCE2EF8A17459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.h in Headers */, + BCE2EF8C17459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.h in Headers */, + BCE2EF9217459D9300C0628C /* GPUImageColorPackingFilter.h in Headers */, + BCE2EF9417459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.h in Headers */, + BCE2EF9817459E2400C0628C /* GPUImageHoughTransformLineDetector.h in Headers */, + BCE2EF9C17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.h in Headers */, + BC56957A174683620081491B /* GPUImageCrosshairGenerator.h in Headers */, + BC56957C174683620081491B /* GPUImageLineGenerator.h in Headers */, + BCA20B571746C9EC0097C84A /* GPUImageBuffer.h in Headers */, + BCA20B591746C9EC0097C84A /* GPUImageHighPassFilter.h in Headers */, + 
BCA20B5B1746C9EC0097C84A /* GPUImageLowPassFilter.h in Headers */, + BCA20B5D1746C9EC0097C84A /* GPUImageMotionDetector.h in Headers */, + BCA20B611746CFE60097C84A /* GPUImageThresholdSketchFilter.h in Headers */, + BCA20B651746D3C30097C84A /* GPUImageSmoothToonFilter.h in Headers */, + BCA20B691746D3DD0097C84A /* GPUImageTiltShiftFilter.h in Headers */, + BCA20B6D1746D44B0097C84A /* GPUImageCGAColorspaceFilter.h in Headers */, + BCA20B711746D4AC0097C84A /* GPUImagePosterizeFilter.h in Headers */, + BCA20B751746DE850097C84A /* GPUImageKuwaharaRadius3Filter.h in Headers */, + BCA20B791746DEDE0097C84A /* GPUImageChromaKeyFilter.h in Headers */, + BCA20B7D1746DF210097C84A /* GPUImageVignetteFilter.h in Headers */, + BCA20B991747081D0097C84A /* GPUImageBulgeDistortionFilter.h in Headers */, + BCA20B9B1747081D0097C84A /* GPUImageClosingFilter.h in Headers */, + BCA20B9D1747081D0097C84A /* GPUImageDilationFilter.h in Headers */, + BCA20B9F1747081D0097C84A /* GPUImageErosionFilter.h in Headers */, + BCA20BA11747081D0097C84A /* GPUImageGlassSphereFilter.h in Headers */, + BCA20BA31747081D0097C84A /* GPUImagePinchDistortionFilter.h in Headers */, + BCA20BA51747081D0097C84A /* GPUImageRGBClosingFilter.h in Headers */, + BCA20BA71747081D0097C84A /* GPUImageRGBDilationFilter.h in Headers */, + BCA20BA91747081D0097C84A /* GPUImageRGBErosionFilter.h in Headers */, + BCA20BAB1747081D0097C84A /* GPUImageRGBOpeningFilter.h in Headers */, + BCA20BAD1747081D0097C84A /* GPUImageSphereRefractionFilter.h in Headers */, + BCA20BAF1747081D0097C84A /* GPUImageStretchDistortionFilter.h in Headers */, + BCA20BB11747081D0097C84A /* GPUImageSwirlFilter.h in Headers */, + BCA20BB5174708630097C84A /* GPUImageOpeningFilter.h in Headers */, + BCA20BC117471C4B0097C84A /* GPUImageJFAVoronoiFilter.h in Headers */, + BCA20BC317471C4B0097C84A /* GPUImageLocalBinaryPatternFilter.h in Headers */, + BCA20BC517471C4B0097C84A /* GPUImageMosaicFilter.h in Headers */, + BCA20BC717471C4B0097C84A /* GPUImagePerlinNoiseFilter.h in Headers */, + BC8A583218124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.h in Headers */, + BCA20BC917471C4B0097C84A /* GPUImageVoronoiConsumerFilter.h in Headers */, + BC96A4F1176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.h in Headers */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXHeadersBuildPhase section */ + +/* Begin PBXNativeTarget section */ + BCA3F31B17239B6500E28AEC /* GPUImage */ = { + isa = PBXNativeTarget; + buildConfigurationList = BCA3F33117239B6500E28AEC /* Build configuration list for PBXNativeTarget "GPUImage" */; + buildPhases = ( + BCA3F31717239B6500E28AEC /* Sources */, + BCA3F31817239B6500E28AEC /* Frameworks */, + BCA3F31917239B6500E28AEC /* Headers */, + BCA3F31A17239B6500E28AEC /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = GPUImage; + productName = GPUImageMac; + productReference = BCA3F31C17239B6500E28AEC /* GPUImage.framework */; + productType = "com.apple.product-type.framework"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + BCA3F31317239B6500E28AEC /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 0500; + ORGANIZATIONNAME = "Sunset Lake Software LLC"; + }; + buildConfigurationList = BCA3F31617239B6500E28AEC /* Build configuration list for PBXProject "GPUImageMac" */; + compatibilityVersion = "Xcode 3.2"; + developmentRegion = English; + hasScannedForEncodings = 0; + knownRegions = ( + en, + ); + mainGroup = BCA3F31217239B6500E28AEC; + productRefGroup 
= BCA3F31D17239B6500E28AEC /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + BCA3F31B17239B6500E28AEC /* GPUImage */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + BCA3F31A17239B6500E28AEC /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + BCA3F31717239B6500E28AEC /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BCF40F1317247D68005AE36A /* GPUImageContext.m in Sources */, + BCF40F1717247FD8005AE36A /* GLProgram.m in Sources */, + BCF40F1C1724829B005AE36A /* GPUImageOutput.m in Sources */, + BCF40F2117248322005AE36A /* GPUImageFilter.m in Sources */, + BCF40F2617248836005AE36A /* GPUImageView.m in Sources */, + BCF40F2A172488D3005AE36A /* GPUImageBrightnessFilter.m in Sources */, + BCF867571725AF9300912E34 /* GPUImageMovieWriter.m in Sources */, + BCF867591725AF9300912E34 /* GPUImagePicture.m in Sources */, + BCF8679E1727585D00912E34 /* GPUImageAVCamera.m in Sources */, + BCF867AE172778D500912E34 /* GPUImageContrastFilter.m in Sources */, + BCF867B0172778D500912E34 /* GPUImageExposureFilter.m in Sources */, + BCF867B2172778D500912E34 /* GPUImageHueFilter.m in Sources */, + BCF867B4172778D500912E34 /* GPUImageSaturationFilter.m in Sources */, + BCF867B6172778D500912E34 /* GPUImageWhiteBalanceFilter.m in Sources */, + BCF867BA1727791200912E34 /* GPUImageLevelsFilter.m in Sources */, + BCF867C01727794000912E34 /* GPUImageMonochromeFilter.m in Sources */, + BCF867C21727794000912E34 /* GPUImageRGBFilter.m in Sources */, + BCF867C81727872D00912E34 /* GPUImagePixellateFilter.m in Sources */, + BCF867CC172788F100912E34 /* GPUImage3x3TextureSamplingFilter.m in Sources */, + BCF867D0172789B200912E34 /* GPUImageSketchFilter.m in Sources */, + BCF867D517278A5600912E34 /* GPUImageSobelEdgeDetectionFilter.m in Sources */, + BCF867D917278A7A00912E34 /* GPUImageToonFilter.m in Sources */, + BCF867DD17278C5A00912E34 /* GPUImageTwoPassFilter.m in Sources */, + BCF867E117278D1900912E34 /* GPUImageGrayscaleFilter.m in Sources */, + BCF867E51727903D00912E34 /* GPUImageKuwaharaFilter.m in Sources */, + BCF867E91728477300912E34 /* GPUImageFalseColorFilter.m in Sources */, + BCF867ED172847A000912E34 /* GPUImageSharpenFilter.m in Sources */, + BCF867F11728487100912E34 /* GPUImageUnsharpMaskFilter.m in Sources */, + BCF867F51728490600912E34 /* GPUImageTwoInputFilter.m in Sources */, + BCF867F91728494C00912E34 /* GPUImageGaussianBlurFilter.m in Sources */, + BCF867FD1728496E00912E34 /* GPUImageTwoPassTextureSamplingFilter.m in Sources */, + BCF8680117284C4000912E34 /* GPUImageFilterGroup.m in Sources */, + BCF8680517284CFC00912E34 /* GPUImageTransformFilter.m in Sources */, + BCF8680917284D0B00912E34 /* GPUImageCropFilter.m in Sources */, + BCF86811172853AA00912E34 /* GPUImageGaussianBlurPositionFilter.m in Sources */, + BCF86813172853AA00912E34 /* GPUImageGaussianSelectiveBlurFilter.m in Sources */, + BCF8681D172854B900912E34 /* GPUImageBilateralFilter.m in Sources */, + BCF8681F172854B900912E34 /* GPUImageBoxBlurFilter.m in Sources */, + BCF86823172854B900912E34 /* GPUImageMedianFilter.m in Sources */, + BCF86829172857B900912E34 /* GPUImageMotionBlurFilter.m in Sources */, + BCF8682B172857B900912E34 /* GPUImageZoomBlurFilter.m in Sources */, + BCF8683117286EDB00912E34 /* GPUImageAddBlendFilter.m in Sources */, + 
BCF8684017286F2800912E34 /* GPUImageColorBurnBlendFilter.m in Sources */, + BCF8684217286F2800912E34 /* GPUImageDarkenBlendFilter.m in Sources */, + BCF8684417286F2800912E34 /* GPUImageDivideBlendFilter.m in Sources */, + BCF8684617286F2800912E34 /* GPUImageLightenBlendFilter.m in Sources */, + BCF8684817286F2800912E34 /* GPUImageMultiplyBlendFilter.m in Sources */, + BCF8684A17286F2800912E34 /* GPUImageOverlayBlendFilter.m in Sources */, + BCF868541728716400912E34 /* GPUImageColorDodgeBlendFilter.m in Sources */, + BCF868561728716400912E34 /* GPUImageDifferenceBlendFilter.m in Sources */, + BCF868581728716400912E34 /* GPUImageLinearBurnBlendFilter.m in Sources */, + BCF8685A1728716400912E34 /* GPUImageScreenBlendFilter.m in Sources */, + BCF868701728721300912E34 /* GPUImageColorBlendFilter.m in Sources */, + BCF868721728721300912E34 /* GPUImageExclusionBlendFilter.m in Sources */, + BCF868741728721300912E34 /* GPUImageHardLightBlendFilter.m in Sources */, + BCF868761728721300912E34 /* GPUImageHueBlendFilter.m in Sources */, + BCF868781728721300912E34 /* GPUImageLuminosityBlendFilter.m in Sources */, + BCF8687A1728721300912E34 /* GPUImageNormalBlendFilter.m in Sources */, + BCF8687C1728721300912E34 /* GPUImagePoissonBlendFilter.m in Sources */, + BCF8687E1728721300912E34 /* GPUImageSaturationBlendFilter.m in Sources */, + BCF868801728721300912E34 /* GPUImageSoftLightBlendFilter.m in Sources */, + BCF868821728721300912E34 /* GPUImageSubtractBlendFilter.m in Sources */, + BCF868861728773900912E34 /* GPUImageTwoInputCrossTextureSamplingFilter.m in Sources */, + BCF8689017287ED000912E34 /* GPUImageChromaKeyBlendFilter.m in Sources */, + BCF8689217287ED000912E34 /* GPUImageDissolveBlendFilter.m in Sources */, + BCF86896172880B800912E34 /* GPUImageSourceOverBlendFilter.m in Sources */, + BCF8689A172883A300912E34 /* GPUImageMaskFilter.m in Sources */, + BCF8689E1728862100912E34 /* GPUImageOpacityFilter.m in Sources */, + BCF868A21728866400912E34 /* GPUImageAlphaBlendFilter.m in Sources */, + BCB009E7172A1BBD00DB804C /* GPUImageGammaFilter.m in Sources */, + BCB009EB172A1BE800DB804C /* GPUImageToneCurveFilter.m in Sources */, + BCB009F1172A23AA00DB804C /* GPUImageHazeFilter.m in Sources */, + BCB009F3172A23AA00DB804C /* GPUImageHighlightShadowFilter.m in Sources */, + BCB009F7172A243400DB804C /* GPUImageSepiaFilter.m in Sources */, + BCB009FB172A246E00DB804C /* GPUImageColorMatrixFilter.m in Sources */, + BC78B70F172DCCB800342C6A /* GPUImageAmatorkaFilter.m in Sources */, + BC78B711172DCCB800342C6A /* GPUImageLookupFilter.m in Sources */, + BC78B713172DCCB800342C6A /* GPUImageMissEtikateFilter.m in Sources */, + BC78B715172DCCB800342C6A /* GPUImageSoftEleganceFilter.m in Sources */, + BCBF6932173061D200E5792A /* GPUImage3x3ConvolutionFilter.m in Sources */, + BCBF69361730623200E5792A /* GPUImageEmbossFilter.m in Sources */, + BC6C553D1730636600EB222D /* GPUImageLaplacianFilter.m in Sources */, + BC6C55471730BDCF00EB222D /* GPUImageLanczosResamplingFilter.m in Sources */, + BCAE41E51731C3000020F80D /* GPUImageThreeInputFilter.m in Sources */, + BCC0DF4317358315007C485F /* GPUImageColorInvertFilter.m in Sources */, + BCC0DF4917359529007C485F /* GPUImageHistogramFilter.m in Sources */, + BCC0DF4B17359529007C485F /* GPUImageHistogramGenerator.m in Sources */, + BCC0DF53173595D3007C485F /* GPUImageAverageColor.m in Sources */, + BCC0DF55173595D3007C485F /* GPUImageLuminosity.m in Sources */, + BCC0DF57173595D3007C485F /* GPUImageSolidColorGenerator.m in Sources */, + BCC0DF5B173596A8007C485F /* 
GPUImageAdaptiveThresholdFilter.m in Sources */, + BCC0DF5F173596B9007C485F /* GPUImageAverageLuminanceThresholdFilter.m in Sources */, + BCC0DF6317359F43007C485F /* GPUImageLuminanceThresholdFilter.m in Sources */, + BCE817A91735FDC70071D084 /* GPUImageHalftoneFilter.m in Sources */, + BCE817AB1735FDC70071D084 /* GPUImagePixellatePositionFilter.m in Sources */, + BCE817AD1735FDC70071D084 /* GPUImagePolarPixellateFilter.m in Sources */, + BCE817AF1735FDC70071D084 /* GPUImagePolkaDotFilter.m in Sources */, + BCE817B31735FE150071D084 /* GPUImageCrosshatchFilter.m in Sources */, + BCE817BB173608690071D084 /* GPUImageCannyEdgeDetectionFilter.m in Sources */, + BCE817BD173608690071D084 /* GPUImagePrewittEdgeDetectionFilter.m in Sources */, + BCE817BF173608690071D084 /* GPUImageThresholdEdgeDetectionFilter.m in Sources */, + BCE817C31736092A0071D084 /* GPUImageXYDerivativeFilter.m in Sources */, + BCE817CD17360BFD0071D084 /* GPUImageDirectionalNonMaximumSuppressionFilter.m in Sources */, + BCE817CF17360BFD0071D084 /* GPUimageDirectionalSobelEdgeDetectionFilter.m in Sources */, + BCE817D317360C6A0071D084 /* GPUImageWeakPixelInclusionFilter.m in Sources */, + BCE2EF8917459D5B00C0628C /* GPUImageHarrisCornerDetectionFilter.m in Sources */, + BCE2EF8B17459D5B00C0628C /* GPUImageNobleCornerDetectionFilter.m in Sources */, + BCE2EF8D17459D5B00C0628C /* GPUImageShiTomasiFeatureDetectionFilter.m in Sources */, + BCE2EF9317459D9300C0628C /* GPUImageColorPackingFilter.m in Sources */, + BCE2EF9517459D9300C0628C /* GPUImageThresholdedNonMaximumSuppressionFilter.m in Sources */, + BCE2EF9917459E2400C0628C /* GPUImageHoughTransformLineDetector.m in Sources */, + BCE2EF9D17459E5100C0628C /* GPUImageParallelCoordinateLineTransformFilter.m in Sources */, + BC56957B174683620081491B /* GPUImageCrosshairGenerator.m in Sources */, + BC56957D174683620081491B /* GPUImageLineGenerator.m in Sources */, + BCA20B581746C9EC0097C84A /* GPUImageBuffer.m in Sources */, + BCA20B5A1746C9EC0097C84A /* GPUImageHighPassFilter.m in Sources */, + BCA20B5C1746C9EC0097C84A /* GPUImageLowPassFilter.m in Sources */, + BCA20B5E1746C9EC0097C84A /* GPUImageMotionDetector.m in Sources */, + BCA20B621746CFE60097C84A /* GPUImageThresholdSketchFilter.m in Sources */, + BCA20B661746D3C30097C84A /* GPUImageSmoothToonFilter.m in Sources */, + BCA20B6A1746D3DD0097C84A /* GPUImageTiltShiftFilter.m in Sources */, + BCA20B6E1746D44B0097C84A /* GPUImageCGAColorspaceFilter.m in Sources */, + BCA20B721746D4AC0097C84A /* GPUImagePosterizeFilter.m in Sources */, + BCA20B761746DE850097C84A /* GPUImageKuwaharaRadius3Filter.m in Sources */, + BCA20B7A1746DEDE0097C84A /* GPUImageChromaKeyFilter.m in Sources */, + BCA20B7E1746DF210097C84A /* GPUImageVignetteFilter.m in Sources */, + BCA20B9A1747081D0097C84A /* GPUImageBulgeDistortionFilter.m in Sources */, + BCA20B9C1747081D0097C84A /* GPUImageClosingFilter.m in Sources */, + BCA20B9E1747081D0097C84A /* GPUImageDilationFilter.m in Sources */, + BCA20BA01747081D0097C84A /* GPUImageErosionFilter.m in Sources */, + BCA20BA21747081D0097C84A /* GPUImageGlassSphereFilter.m in Sources */, + BCA20BA41747081D0097C84A /* GPUImagePinchDistortionFilter.m in Sources */, + BCA20BA61747081D0097C84A /* GPUImageRGBClosingFilter.m in Sources */, + BCA20BA81747081D0097C84A /* GPUImageRGBDilationFilter.m in Sources */, + BCA20BAA1747081D0097C84A /* GPUImageRGBErosionFilter.m in Sources */, + BCA20BAC1747081D0097C84A /* GPUImageRGBOpeningFilter.m in Sources */, + BCA20BAE1747081D0097C84A /* GPUImageSphereRefractionFilter.m in 
Sources */, + BCA20BB01747081D0097C84A /* GPUImageStretchDistortionFilter.m in Sources */, + BCA20BB21747081D0097C84A /* GPUImageSwirlFilter.m in Sources */, + BCA20BB6174708630097C84A /* GPUImageOpeningFilter.m in Sources */, + BCA20BC217471C4B0097C84A /* GPUImageJFAVoronoiFilter.m in Sources */, + BCA20BC417471C4B0097C84A /* GPUImageLocalBinaryPatternFilter.m in Sources */, + BCA20BC617471C4B0097C84A /* GPUImageMosaicFilter.m in Sources */, + BCA20BC817471C4B0097C84A /* GPUImagePerlinNoiseFilter.m in Sources */, + BCA20BCA17471C4B0097C84A /* GPUImageVoronoiConsumerFilter.m in Sources */, + BC96A4F2176563C300F215A2 /* GPUImageNonMaximumSuppressionFilter.m in Sources */, + A87E5E13177648F3007FD5B1 /* GPUImageRawDataOutput.m in Sources */, + A87E5E1717764B16007FD5B1 /* GPUImageRawDataInput.m in Sources */, + BC8A583318124ABD00E6B507 /* GPUImageSingleComponentGaussianBlurFilter.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin XCBuildConfiguration section */ + BCA3F32F17239B6500E28AEC /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_ENABLE_OBJC_EXCEPTIONS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_SYMBOLS_PRIVATE_EXTERN = NO; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + INSTALL_PATH = "@loader_path/../Frameworks"; + MACOSX_DEPLOYMENT_TARGET = 10.6; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = macosx; + }; + name = Debug; + }; + BCA3F33017239B6500E28AEC /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = YES; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_ENABLE_OBJC_EXCEPTIONS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + INSTALL_PATH = "@loader_path/../Frameworks"; + MACOSX_DEPLOYMENT_TARGET = 10.6; + SDKROOT = macosx; + }; + name = Release; + }; + BCA3F33217239B6500E28AEC /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + COMBINE_HIDPI_IMAGES = YES; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + FRAMEWORK_VERSION = A; + GCC_PRECOMPILE_PREFIX_HEADER = YES; + GCC_PREFIX_HEADER = "Source/Mac/GPUImageMac-Prefix.pch"; + INFOPLIST_FILE = "Source/Mac/GPUImageMac-Info.plist"; + PRODUCT_NAME = "$(TARGET_NAME)"; + WRAPPER_EXTENSION = framework; + }; + name = Debug; + }; + BCA3F33317239B6500E28AEC /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + COMBINE_HIDPI_IMAGES = YES; + DYLIB_COMPATIBILITY_VERSION = 1; + DYLIB_CURRENT_VERSION = 1; + FRAMEWORK_VERSION = A; + 
GCC_PRECOMPILE_PREFIX_HEADER = YES;
+ GCC_PREFIX_HEADER = "Source/Mac/GPUImageMac-Prefix.pch";
+ INFOPLIST_FILE = "Source/Mac/GPUImageMac-Info.plist";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ WRAPPER_EXTENSION = framework;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ BCA3F31617239B6500E28AEC /* Build configuration list for PBXProject "GPUImageMac" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BCA3F32F17239B6500E28AEC /* Debug */,
+ BCA3F33017239B6500E28AEC /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ BCA3F33117239B6500E28AEC /* Build configuration list for PBXNativeTarget "GPUImage" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BCA3F33217239B6500E28AEC /* Debug */,
+ BCA3F33317239B6500E28AEC /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = BCA3F31317239B6500E28AEC /* Project object */;
+}
diff --git a/GPUImage/GPUImageMac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme b/GPUImage/GPUImageMac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme
new file mode 100644
index 0000000..57e1510
--- /dev/null
+++ b/GPUImage/GPUImageMac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme
@@ -0,0 +1,59 @@
diff --git a/GPUImage/Resources/lookup.png b/GPUImage/Resources/lookup.png
new file mode 100644
index 0000000..ed814df
Binary files /dev/null and b/GPUImage/Resources/lookup.png differ
diff --git a/GPUImage/Resources/lookup_amatorka.png b/GPUImage/Resources/lookup_amatorka.png
new file mode 100644
index 0000000..4a2cc8a
Binary files /dev/null and b/GPUImage/Resources/lookup_amatorka.png differ
diff --git a/GPUImage/Resources/lookup_miss_etikate.png b/GPUImage/Resources/lookup_miss_etikate.png
new file mode 100644
index 0000000..e1317d7
Binary files /dev/null and b/GPUImage/Resources/lookup_miss_etikate.png differ
diff --git a/GPUImage/Resources/lookup_soft_elegance_1.png b/GPUImage/Resources/lookup_soft_elegance_1.png
new file mode 100644
index 0000000..525437a
Binary files /dev/null and b/GPUImage/Resources/lookup_soft_elegance_1.png differ
diff --git a/GPUImage/Resources/lookup_soft_elegance_2.png b/GPUImage/Resources/lookup_soft_elegance_2.png
new file mode 100644
index 0000000..ccc6d4e
Binary files /dev/null and b/GPUImage/Resources/lookup_soft_elegance_2.png differ
diff --git a/GPUImage/Source/GLProgram.h b/GPUImage/Source/GLProgram.h
new file mode 100755
index 0000000..572d8ae
--- /dev/null
+++ b/GPUImage/Source/GLProgram.h
@@ -0,0 +1,42 @@
+// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
+// A description of this can be found at his page on the topic:
+// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
+// I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
+
+#import <Foundation/Foundation.h>
+
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+#import <OpenGLES/ES2/gl.h>
+#import <OpenGLES/ES2/glext.h>
+#else
+#import <OpenGL/OpenGL.h>
+#import <OpenGL/gl.h>
+#endif
+
+@interface GLProgram : NSObject
+{
+ NSMutableArray *attributes;
+ NSMutableArray *uniforms;
+ GLuint program,
+ vertShader,
+ fragShader;
+}
+
+@property(readwrite, nonatomic) BOOL initialized;
+
+- (id)initWithVertexShaderString:(NSString *)vShaderString
+ fragmentShaderString:(NSString *)fShaderString;
+- (id)initWithVertexShaderString:(NSString *)vShaderString
+ fragmentShaderFilename:(NSString *)fShaderFilename;
+- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
+ fragmentShaderFilename:(NSString *)fShaderFilename;
+- (void)addAttribute:(NSString *)attributeName;
+- (GLuint)attributeIndex:(NSString *)attributeName;
+- (GLuint)uniformIndex:(NSString *)uniformName;
+- (BOOL)link;
+- (void)use;
+- (NSString *)vertexShaderLog;
+- (NSString *)fragmentShaderLog;
+- (NSString *)programLog;
+- (void)validate;
+@end
diff --git a/GPUImage/Source/GLProgram.m b/GPUImage/Source/GLProgram.m
new file mode 100755
index 0000000..7ce6ece
--- /dev/null
+++ b/GPUImage/Source/GLProgram.m
@@ -0,0 +1,273 @@
+// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
+// A description of this can be found at his page on the topic:
+// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
+
+
+#import "GLProgram.h"
+// START:typedefs
+#pragma mark Function Pointer Definitions
+typedef void (*GLInfoFunction)(GLuint program,
+ GLenum pname,
+ GLint* params);
+typedef void (*GLLogFunction) (GLuint program,
+ GLsizei bufsize,
+ GLsizei* length,
+ GLchar* infolog);
+// END:typedefs
+#pragma mark -
+#pragma mark Private Extension Method Declaration
+// START:extension
+@interface GLProgram()
+
+- (BOOL)compileShader:(GLuint *)shader
+ type:(GLenum)type
+ string:(NSString *)shaderString;
+- (NSString *)logForOpenGLObject:(GLuint)object
+ infoCallback:(GLInfoFunction)infoFunc
+ logFunc:(GLLogFunction)logFunc;
+@end
+// END:extension
+#pragma mark -
+
+@implementation GLProgram
+// START:init
+
+@synthesize initialized = _initialized;
+
+- (id)initWithVertexShaderString:(NSString *)vShaderString
+ fragmentShaderString:(NSString *)fShaderString;
+{
+ if ((self = [super init]))
+ {
+ _initialized = NO;
+
+ attributes = [[NSMutableArray alloc] init];
+ uniforms = [[NSMutableArray alloc] init];
+ program = glCreateProgram();
+
+ if (![self compileShader:&vertShader
+ type:GL_VERTEX_SHADER
+ string:vShaderString])
+ NSLog(@"Failed to compile vertex shader");
+
+ // Create and compile fragment shader
+ if (![self compileShader:&fragShader
+ type:GL_FRAGMENT_SHADER
+ string:fShaderString])
+ NSLog(@"Failed to compile fragment shader");
+
+ glAttachShader(program, vertShader);
+ glAttachShader(program, fragShader);
+ }
+
+ return self;
+}
+
+- (id)initWithVertexShaderString:(NSString *)vShaderString
+ fragmentShaderFilename:(NSString *)fShaderFilename;
+{
+ NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
+ NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];
+
+ if ((self = [self initWithVertexShaderString:vShaderString
fragmentShaderString:fragmentShaderString])) + { + } + + return self; +} + +- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename + fragmentShaderFilename:(NSString *)fShaderFilename; +{ + NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"]; + NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil]; + + NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"]; + NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil]; + + if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString])) + { + } + + return self; +} +// END:init +// START:compile +- (BOOL)compileShader:(GLuint *)shader + type:(GLenum)type + string:(NSString *)shaderString +{ +// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + GLint status; + const GLchar *source; + + source = + (GLchar *)[shaderString UTF8String]; + if (!source) + { + NSLog(@"Failed to load vertex shader"); + return NO; + } + + *shader = glCreateShader(type); + glShaderSource(*shader, 1, &source, NULL); + glCompileShader(*shader); + + glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); + + if (status != GL_TRUE) + { + GLint logLength; + glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); + if (logLength > 0) + { + GLchar *log = (GLchar *)malloc(logLength); + glGetShaderInfoLog(*shader, logLength, &logLength, log); + NSLog(@"Shader compile log:\n%s", log); + free(log); + } + } + +// CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime); +// NSLog(@"Compiled in %f ms", linkTime * 1000.0); + + return status == GL_TRUE; +} +// END:compile +#pragma mark - +// START:addattribute +- (void)addAttribute:(NSString *)attributeName +{ + if (![attributes containsObject:attributeName]) + { + [attributes addObject:attributeName]; + glBindAttribLocation(program, + (GLuint)[attributes indexOfObject:attributeName], + [attributeName UTF8String]); + } +} +// END:addattribute +// START:indexmethods +- (GLuint)attributeIndex:(NSString *)attributeName +{ + return (GLuint)[attributes indexOfObject:attributeName]; +} +- (GLuint)uniformIndex:(NSString *)uniformName +{ + return glGetUniformLocation(program, [uniformName UTF8String]); +} +// END:indexmethods +#pragma mark - +// START:link +- (BOOL)link +{ +// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + GLint status; + + glLinkProgram(program); + + glGetProgramiv(program, GL_LINK_STATUS, &status); + if (status == GL_FALSE) + return NO; + + if (vertShader) + { + glDeleteShader(vertShader); + vertShader = 0; + } + if (fragShader) + { + glDeleteShader(fragShader); + fragShader = 0; + } + + self.initialized = YES; + +// CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime); +// NSLog(@"Linked in %f ms", linkTime * 1000.0); + + return YES; +} +// END:link +// START:use +- (void)use +{ + glUseProgram(program); +} +// END:use +#pragma mark - +// START:privatelog +- (NSString *)logForOpenGLObject:(GLuint)object + infoCallback:(GLInfoFunction)infoFunc + logFunc:(GLLogFunction)logFunc +{ + GLint logLength = 0, charsWritten = 0; + + infoFunc(object, GL_INFO_LOG_LENGTH, &logLength); + if (logLength < 1) + return nil; + + char *logBytes = malloc(logLength); + logFunc(object, logLength, &charsWritten, logBytes); + NSString *log = [[NSString alloc] initWithBytes:logBytes + length:logLength + 
encoding:NSUTF8StringEncoding];
+ free(logBytes);
+ return log;
+}
+// END:privatelog
+// START:log
+// Shader objects require the glGetShaderiv/glGetShaderInfoLog pair; the glGetProgramiv variants accept only program objects.
+- (NSString *)vertexShaderLog
+{
+ return [self logForOpenGLObject:vertShader
+ infoCallback:(GLInfoFunction)&glGetShaderiv
+ logFunc:(GLLogFunction)&glGetShaderInfoLog];
+
+}
+- (NSString *)fragmentShaderLog
+{
+ return [self logForOpenGLObject:fragShader
+ infoCallback:(GLInfoFunction)&glGetShaderiv
+ logFunc:(GLLogFunction)&glGetShaderInfoLog];
+}
+- (NSString *)programLog
+{
+ return [self logForOpenGLObject:program
+ infoCallback:(GLInfoFunction)&glGetProgramiv
+ logFunc:(GLLogFunction)&glGetProgramInfoLog];
+}
+// END:log
+
+- (void)validate;
+{
+ GLint logLength;
+
+ glValidateProgram(program);
+ glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
+ if (logLength > 0)
+ {
+ GLchar *log = (GLchar *)malloc(logLength);
+ glGetProgramInfoLog(program, logLength, &logLength, log);
+ NSLog(@"Program validate log:\n%s", log);
+ free(log);
+ }
+}
+
+#pragma mark -
+// START:dealloc
+- (void)dealloc
+{
+ if (vertShader)
+ glDeleteShader(vertShader);
+
+ if (fragShader)
+ glDeleteShader(fragShader);
+
+ if (program)
+ glDeleteProgram(program);
+
+}
+// END:dealloc
+@end
diff --git a/GPUImage/Source/GPUImage.h b/GPUImage/Source/GPUImage.h
new file mode 100755
index 0000000..fa08e98
--- /dev/null
+++ b/GPUImage/Source/GPUImage.h
@@ -0,0 +1,155 @@
+#import "GLProgram.h"
+
+// Base classes
+#import "GPUImageContext.h"
+#import "GPUImageOutput.h"
+#import "GPUImageView.h"
+#import "GPUImageVideoCamera.h"
+#import "GPUImageStillCamera.h"
+#import "GPUImageMovie.h"
+#import "GPUImagePicture.h"
+#import "GPUImageRawDataInput.h"
+#import "GPUImageRawDataOutput.h"
+#import "GPUImageMovieWriter.h"
+#import "GPUImageFilterPipeline.h"
+#import "GPUImageTextureOutput.h"
+#import "GPUImageFilterGroup.h"
+#import "GPUImageTextureInput.h"
+#import "GPUImageUIElement.h"
+#import "GPUImageBuffer.h"
+
+// Filters
+#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
+#import "GPUImagePixellateFilter.h"
+#import "GPUImagePixellatePositionFilter.h"
+#import "GPUImageSepiaFilter.h"
+#import "GPUImageColorInvertFilter.h"
+#import "GPUImageSaturationFilter.h"
+#import "GPUImageContrastFilter.h"
+#import "GPUImageExposureFilter.h"
+#import "GPUImageBrightnessFilter.h"
+#import "GPUImageLevelsFilter.h"
+#import "GPUImageSharpenFilter.h"
+#import "GPUImageGammaFilter.h"
+#import "GPUImageSobelEdgeDetectionFilter.h"
+#import "GPUImageSketchFilter.h"
+#import "GPUImageToonFilter.h"
+#import "GPUImageSmoothToonFilter.h"
+#import "GPUImageMultiplyBlendFilter.h"
+#import "GPUImageDissolveBlendFilter.h"
+#import "GPUImageKuwaharaFilter.h"
+#import "GPUImageKuwaharaRadius3Filter.h"
+#import "GPUImageVignetteFilter.h"
+#import "GPUImageGaussianBlurFilter.h"
+#import "GPUImageGaussianBlurPositionFilter.h"
+#import "GPUImageGaussianSelectiveBlurFilter.h"
+#import "GPUImageOverlayBlendFilter.h"
+#import "GPUImageDarkenBlendFilter.h"
+#import "GPUImageLightenBlendFilter.h"
+#import "GPUImageSwirlFilter.h"
+#import "GPUImageSourceOverBlendFilter.h"
+#import "GPUImageColorBurnBlendFilter.h"
+#import "GPUImageColorDodgeBlendFilter.h"
+#import "GPUImageScreenBlendFilter.h"
+#import "GPUImageExclusionBlendFilter.h"
+#import "GPUImageDifferenceBlendFilter.h"
+#import "GPUImageSubtractBlendFilter.h"
+#import "GPUImageHardLightBlendFilter.h"
+#import "GPUImageSoftLightBlendFilter.h"
+#import "GPUImageColorBlendFilter.h"
+#import "GPUImageHueBlendFilter.h"
+#import "GPUImageSaturationBlendFilter.h"
+#import
"GPUImageLuminosityBlendFilter.h" +#import "GPUImageCropFilter.h" +#import "GPUImageGrayscaleFilter.h" +#import "GPUImageTransformFilter.h" +#import "GPUImageChromaKeyBlendFilter.h" +#import "GPUImageHazeFilter.h" +#import "GPUImageLuminanceThresholdFilter.h" +#import "GPUImagePosterizeFilter.h" +#import "GPUImageBoxBlurFilter.h" +#import "GPUImageAdaptiveThresholdFilter.h" +#import "GPUImageUnsharpMaskFilter.h" +#import "GPUImageBulgeDistortionFilter.h" +#import "GPUImagePinchDistortionFilter.h" +#import "GPUImageCrosshatchFilter.h" +#import "GPUImageCGAColorspaceFilter.h" +#import "GPUImagePolarPixellateFilter.h" +#import "GPUImageStretchDistortionFilter.h" +#import "GPUImagePerlinNoiseFilter.h" +#import "GPUImageJFAVoronoiFilter.h" +#import "GPUImageVoronoiConsumerFilter.h" +#import "GPUImageMosaicFilter.h" +#import "GPUImageTiltShiftFilter.h" +#import "GPUImage3x3ConvolutionFilter.h" +#import "GPUImageEmbossFilter.h" +#import "GPUImageCannyEdgeDetectionFilter.h" +#import "GPUImageThresholdEdgeDetectionFilter.h" +#import "GPUImageMaskFilter.h" +#import "GPUImageHistogramFilter.h" +#import "GPUImageHistogramGenerator.h" +#import "GPUImagePrewittEdgeDetectionFilter.h" +#import "GPUImageXYDerivativeFilter.h" +#import "GPUImageHarrisCornerDetectionFilter.h" +#import "GPUImageAlphaBlendFilter.h" +#import "GPUImageNormalBlendFilter.h" +#import "GPUImageNonMaximumSuppressionFilter.h" +#import "GPUImageRGBFilter.h" +#import "GPUImageMedianFilter.h" +#import "GPUImageBilateralFilter.h" +#import "GPUImageCrosshairGenerator.h" +#import "GPUImageToneCurveFilter.h" +#import "GPUImageNobleCornerDetectionFilter.h" +#import "GPUImageShiTomasiFeatureDetectionFilter.h" +#import "GPUImageErosionFilter.h" +#import "GPUImageRGBErosionFilter.h" +#import "GPUImageDilationFilter.h" +#import "GPUImageRGBDilationFilter.h" +#import "GPUImageOpeningFilter.h" +#import "GPUImageRGBOpeningFilter.h" +#import "GPUImageClosingFilter.h" +#import "GPUImageRGBClosingFilter.h" +#import "GPUImageColorPackingFilter.h" +#import "GPUImageSphereRefractionFilter.h" +#import "GPUImageMonochromeFilter.h" +#import "GPUImageOpacityFilter.h" +#import "GPUImageHighlightShadowFilter.h" +#import "GPUImageFalseColorFilter.h" +#import "GPUImageHSBFilter.h" +#import "GPUImageHueFilter.h" +#import "GPUImageGlassSphereFilter.h" +#import "GPUImageLookupFilter.h" +#import "GPUImageAmatorkaFilter.h" +#import "GPUImageMissEtikateFilter.h" +#import "GPUImageSoftEleganceFilter.h" +#import "GPUImageAddBlendFilter.h" +#import "GPUImageDivideBlendFilter.h" +#import "GPUImagePolkaDotFilter.h" +#import "GPUImageLocalBinaryPatternFilter.h" +#import "GPUImageLanczosResamplingFilter.h" +#import "GPUImageAverageColor.h" +#import "GPUImageSolidColorGenerator.h" +#import "GPUImageLuminosity.h" +#import "GPUImageAverageLuminanceThresholdFilter.h" +#import "GPUImageWhiteBalanceFilter.h" +#import "GPUImageChromaKeyFilter.h" +#import "GPUImageLowPassFilter.h" +#import "GPUImageHighPassFilter.h" +#import "GPUImageMotionDetector.h" +#import "GPUImageHalftoneFilter.h" +#import "GPUImageThresholdedNonMaximumSuppressionFilter.h" +#import "GPUImageHoughTransformLineDetector.h" +#import "GPUImageParallelCoordinateLineTransformFilter.h" +#import "GPUImageThresholdSketchFilter.h" +#import "GPUImageLineGenerator.h" +#import "GPUImageLinearBurnBlendFilter.h" +#import "GPUImageGaussianBlurPositionFilter.h" +#import "GPUImagePixellatePositionFilter.h" +#import "GPUImageTwoInputCrossTextureSamplingFilter.h" +#import "GPUImagePoissonBlendFilter.h" +#import 
"GPUImageMotionBlurFilter.h" +#import "GPUImageZoomBlurFilter.h" +#import "GPUImageLaplacianFilter.h" +#import "GPUImageiOSBlurFilter.h" +#import "GPUImageLuminanceRangeFilter.h" \ No newline at end of file diff --git a/GPUImage/Source/GPUImage3x3ConvolutionFilter.h b/GPUImage/Source/GPUImage3x3ConvolutionFilter.h new file mode 100755 index 0000000..67e68de --- /dev/null +++ b/GPUImage/Source/GPUImage3x3ConvolutionFilter.h @@ -0,0 +1,18 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +/** Runs a 3x3 convolution kernel against the image + */ +@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter +{ + GLint convolutionMatrixUniform; +} + +/** Convolution kernel to run against the image + + The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels. + The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three + If the values in the matrix don't add up to 1.0, the image could be brightened or darkened. + */ +@property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel; + +@end diff --git a/GPUImage/Source/GPUImage3x3ConvolutionFilter.m b/GPUImage/Source/GPUImage3x3ConvolutionFilter.m new file mode 100755 index 0000000..c623ac6 --- /dev/null +++ b/GPUImage/Source/GPUImage3x3ConvolutionFilter.m @@ -0,0 +1,128 @@ +#import "GPUImage3x3ConvolutionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + uniform mediump mat3 convolutionMatrix; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb; + mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb; + mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb; + mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb; + + mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2]; + resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2]; + resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2]; + + gl_FragColor = vec4(resultColor, centerColor.a); + } +); +#else +NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + uniform mat3 convolutionMatrix; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying 
+
+ varying vec2 topTextureCoordinate;
+ varying vec2 topLeftTextureCoordinate;
+ varying vec2 topRightTextureCoordinate;
+
+ varying vec2 bottomTextureCoordinate;
+ varying vec2 bottomLeftTextureCoordinate;
+ varying vec2 bottomRightTextureCoordinate;
+
+ void main()
+ {
+ vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
+ vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
+ vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
+ vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
+ vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
+ vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
+ vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
+ vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
+ vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
+
+ vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
+ resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
+ resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
+
+ gl_FragColor = vec4(resultColor, centerColor.a);
+ }
+);
+#endif
+
+@implementation GPUImage3x3ConvolutionFilter
+
+@synthesize convolutionKernel = _convolutionKernel;
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)init;
+{
+ if (!(self = [self initWithFragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString]))
+ {
+ return nil;
+ }
+
+ self.convolutionKernel = (GPUMatrix3x3){
+ {0.f, 0.f, 0.f},
+ {0.f, 1.f, 0.f},
+ {0.f, 0.f, 0.f}
+ };
+
+ return self;
+}
+
+- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
+{
+ if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
+ {
+ return nil;
+ }
+
+ convolutionMatrixUniform = [filterProgram uniformIndex:@"convolutionMatrix"];
+
+ return self;
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (void)setConvolutionKernel:(GPUMatrix3x3)newValue;
+{
+ _convolutionKernel = newValue;
+
+ [self setMatrix3f:_convolutionKernel forUniform:convolutionMatrixUniform program:filterProgram];
+}
+
+@end
diff --git a/GPUImage/Source/GPUImage3x3TextureSamplingFilter.h b/GPUImage/Source/GPUImage3x3TextureSamplingFilter.h
new file mode 100644
index 0000000..5599e15
--- /dev/null
+++ b/GPUImage/Source/GPUImage3x3TextureSamplingFilter.h
@@ -0,0 +1,18 @@
+#import "GPUImageFilter.h"
+
+extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
+
+@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
+{
+ GLint texelWidthUniform, texelHeightUniform;
+
+ CGFloat texelWidth, texelHeight;
+ BOOL hasOverriddenImageSizeFactor;
+}
+
+// The texel width and height determine how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
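+// Example (illustrative values, not from the original source): for a 640x480
+// input with no rotation, the implementation derives texelWidth = 1.0/640 and
+// texelHeight = 1.0/480 in setupFilterForSize:. Assigning either property
+// directly, e.g.
+//     filter.texelWidth = 2.0 / 640.0;   // sample two pixels outward
+// sets hasOverriddenImageSizeFactor so that automatic calculation is skipped.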
+@property(readwrite, nonatomic) CGFloat texelWidth; +@property(readwrite, nonatomic) CGFloat texelHeight; + + +@end diff --git a/GPUImage/Source/GPUImage3x3TextureSamplingFilter.m b/GPUImage/Source/GPUImage3x3TextureSamplingFilter.m new file mode 100644 index 0000000..05c4d50 --- /dev/null +++ b/GPUImage/Source/GPUImage3x3TextureSamplingFilter.m @@ -0,0 +1,121 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +// Override vertex shader to remove dependent texture reads +NSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform float texelWidth; + uniform float texelHeight; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 widthStep = vec2(texelWidth, 0.0); + vec2 heightStep = vec2(0.0, texelHeight); + vec2 widthHeightStep = vec2(texelWidth, texelHeight); + vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight); + + textureCoordinate = inputTextureCoordinate.xy; + leftTextureCoordinate = inputTextureCoordinate.xy - widthStep; + rightTextureCoordinate = inputTextureCoordinate.xy + widthStep; + + topTextureCoordinate = inputTextureCoordinate.xy - heightStep; + topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep; + topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep; + + bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep; + bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep; + bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep; + } +); + + +@implementation GPUImage3x3TextureSamplingFilter + +@synthesize texelWidth = _texelWidth; +@synthesize texelHeight = _texelHeight; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"]; + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [self initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + if (!hasOverriddenImageSizeFactor) + { + _texelWidth = 1.0 / filterFrameSize.width; + _texelHeight = 1.0 / filterFrameSize.height; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + glUniform1f(texelWidthUniform, _texelHeight); + glUniform1f(texelHeightUniform, _texelWidth); + } + else + { + glUniform1f(texelWidthUniform, _texelWidth); + glUniform1f(texelHeightUniform, _texelHeight); + } + }); + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTexelWidth:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = 
YES; + _texelWidth = newValue; + + [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram]; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelHeight = newValue; + + [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageAdaptiveThresholdFilter.h b/GPUImage/Source/GPUImageAdaptiveThresholdFilter.h new file mode 100755 index 0000000..3278556 --- /dev/null +++ b/GPUImage/Source/GPUImageAdaptiveThresholdFilter.h @@ -0,0 +1,9 @@ +#import "GPUImageFilterGroup.h" + +@interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup + +/** A multiplier for the background averaging blur radius in pixels, with a default of 4 + */ +@property(readwrite, nonatomic) CGFloat blurRadiusInPixels; + +@end diff --git a/GPUImage/Source/GPUImageAdaptiveThresholdFilter.m b/GPUImage/Source/GPUImageAdaptiveThresholdFilter.m new file mode 100755 index 0000000..71fa6ab --- /dev/null +++ b/GPUImage/Source/GPUImageAdaptiveThresholdFilter.m @@ -0,0 +1,100 @@ +#import "GPUImageAdaptiveThresholdFilter.h" +#import "GPUImageFilter.h" +#import "GPUImageTwoInputFilter.h" +#import "GPUImageGrayscaleFilter.h" +#import "GPUImageBoxBlurFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + highp float blurredInput = texture2D(inputImageTexture, textureCoordinate).r; + highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r; + highp float thresholdResult = step(blurredInput - 0.05, localLuminance); + + gl_FragColor = vec4(vec3(thresholdResult), 1.0); + } +); +#else +NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + float blurredInput = texture2D(inputImageTexture, textureCoordinate).r; + float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r; + float thresholdResult = step(blurredInput - 0.05, localLuminance); + + gl_FragColor = vec4(vec3(thresholdResult), 1.0); + } +); +#endif + +@interface GPUImageAdaptiveThresholdFilter() +{ + GPUImageBoxBlurFilter *boxBlurFilter; +} +@end + +@implementation GPUImageAdaptiveThresholdFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: reduce to luminance + GPUImageGrayscaleFilter *luminanceFilter = [[GPUImageGrayscaleFilter alloc] init]; + [self addFilter:luminanceFilter]; + + // Second pass: perform a box blur + boxBlurFilter = [[GPUImageBoxBlurFilter alloc] init]; + [self addFilter:boxBlurFilter]; + + // Third pass: compare the blurred background luminance to the local value + GPUImageFilter *adaptiveThresholdFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString]; + [self addFilter:adaptiveThresholdFilter]; + + [luminanceFilter addTarget:boxBlurFilter]; + + [boxBlurFilter addTarget:adaptiveThresholdFilter]; + // To prevent double updating of this filter, disable updates from the sharp luminance image side + [luminanceFilter addTarget:adaptiveThresholdFilter]; + + 
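+    // The order of the two addTarget: calls on adaptiveThresholdFilter above
+    // determines its texture slots (targets take slots in the order they are
+    // added): the blurred average arrives on inputImageTexture and the sharp
+    // luminance image on inputImageTexture2, matching the shader's
+    // step(blurredInput - 0.05, localLuminance) comparison.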
self.initialFilters = [NSArray arrayWithObject:luminanceFilter]; + self.terminalFilter = adaptiveThresholdFilter; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + boxBlurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return boxBlurFilter.blurRadiusInPixels; +} + +@end diff --git a/GPUImage/Source/GPUImageAddBlendFilter.h b/GPUImage/Source/GPUImageAddBlendFilter.h new file mode 100644 index 0000000..b14c60c --- /dev/null +++ b/GPUImage/Source/GPUImageAddBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageAddBlendFilter.m b/GPUImage/Source/GPUImageAddBlendFilter.m new file mode 100644 index 0000000..c89054a --- /dev/null +++ b/GPUImage/Source/GPUImageAddBlendFilter.m @@ -0,0 +1,100 @@ +#import "GPUImageAddBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 base = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + mediump float r; + if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) { + r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + r = overlay.r + base.r; + } + + mediump float g; + if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) { + g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + g = overlay.g + base.g; + } + + mediump float b; + if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) { + b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + b = overlay.b + base.b; + } + + mediump float a = overlay.a + base.a - overlay.a * base.a; + + gl_FragColor = vec4(r, g, b, a); + } +); +#else +NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + float r; + if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) { + r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + r = overlay.r + base.r; + } + + float g; + if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) { + g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + g = overlay.g + base.g; + } + + float b; + if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) { + b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + b = overlay.b + base.b; + } + + float a = overlay.a + base.a - overlay.a * base.a; + + gl_FragColor = vec4(r, g, b, a); + } +); +#endif + + + +@implementation GPUImageAddBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageAddBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageAlphaBlendFilter.h 
b/GPUImage/Source/GPUImageAlphaBlendFilter.h new file mode 100755 index 0000000..c4d7575 --- /dev/null +++ b/GPUImage/Source/GPUImageAlphaBlendFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter +{ + GLint mixUniform; +} + +// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level +@property(readwrite, nonatomic) CGFloat mix; + +@end diff --git a/GPUImage/Source/GPUImageAlphaBlendFilter.m b/GPUImage/Source/GPUImageAlphaBlendFilter.m new file mode 100755 index 0000000..077df79 --- /dev/null +++ b/GPUImage/Source/GPUImageAlphaBlendFilter.m @@ -0,0 +1,72 @@ +#import "GPUImageAlphaBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform lowp float mixturePercent; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a); + } +); +#else +NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float mixturePercent; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a); + } +); +#endif + +@implementation GPUImageAlphaBlendFilter + +@synthesize mix = _mix; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageAlphaBlendFragmentShaderString])) + { + return nil; + } + + mixUniform = [filterProgram uniformIndex:@"mixturePercent"]; + self.mix = 0.5; + + return self; +} + + +#pragma mark - +#pragma mark Accessors + +- (void)setMix:(CGFloat)newValue; +{ + _mix = newValue; + + [self setFloat:_mix forUniform:mixUniform program:filterProgram]; +} + + +@end diff --git a/GPUImage/Source/GPUImageAmatorkaFilter.h b/GPUImage/Source/GPUImageAmatorkaFilter.h new file mode 100755 index 0000000..1dbe096 --- /dev/null +++ b/GPUImage/Source/GPUImageAmatorkaFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImagePicture; + +/** A photo filter based on Photoshop action by Amatorka + http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631 + */ + +// Note: If you want to use this effect you have to add lookup_amatorka.png +// from Resources folder to your application bundle. 
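+// A minimal usage sketch (variable names are illustrative; the calls mirror
+// the pattern this class's own init uses for its lookup image):
+//     GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
+//     GPUImageAmatorkaFilter *amatorka = [[GPUImageAmatorkaFilter alloc] init];
+//     [source addTarget:amatorka];
+//     [source processImage];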
+ +@interface GPUImageAmatorkaFilter : GPUImageFilterGroup +{ + GPUImagePicture *lookupImageSource; +} + +@end diff --git a/GPUImage/Source/GPUImageAmatorkaFilter.m b/GPUImage/Source/GPUImageAmatorkaFilter.m new file mode 100755 index 0000000..dbb29f4 --- /dev/null +++ b/GPUImage/Source/GPUImageAmatorkaFilter.m @@ -0,0 +1,43 @@ +#import "GPUImageAmatorkaFilter.h" +#import "GPUImagePicture.h" +#import "GPUImageLookupFilter.h" + +@implementation GPUImageAmatorkaFilter + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + UIImage *image = [UIImage imageNamed:@"lookup_amatorka.png"]; +#else + NSImage *image = [NSImage imageNamed:@"lookup_amatorka.png"]; +#endif + + NSAssert(image, @"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle."); + + lookupImageSource = [[GPUImagePicture alloc] initWithImage:image]; + GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init]; + [self addFilter:lookupFilter]; + + [lookupImageSource addTarget:lookupFilter atTextureLocation:1]; + [lookupImageSource processImage]; + + self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil]; + self.terminalFilter = lookupFilter; + + return self; +} + +-(void)prepareForImageCapture { + [lookupImageSource processImage]; + [super prepareForImageCapture]; +} + +#pragma mark - +#pragma mark Accessors + +@end diff --git a/GPUImage/Source/GPUImageAverageColor.h b/GPUImage/Source/GPUImageAverageColor.h new file mode 100644 index 0000000..263fd5a --- /dev/null +++ b/GPUImage/Source/GPUImageAverageColor.h @@ -0,0 +1,20 @@ +#import "GPUImageFilter.h" + +extern NSString *const kGPUImageColorAveragingVertexShaderString; + +@interface GPUImageAverageColor : GPUImageFilter +{ + GLint texelWidthUniform, texelHeightUniform; + + NSUInteger numberOfStages; + NSMutableArray *stageTextures, *stageFramebuffers, *stageSizes; + + GLubyte *rawImagePixels; +} + +// This block is called on the completion of color averaging for a frame +@property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime); + +- (void)extractAverageColorAtFrameTime:(CMTime)frameTime; + +@end diff --git a/GPUImage/Source/GPUImageAverageColor.m b/GPUImage/Source/GPUImageAverageColor.m new file mode 100644 index 0000000..4f3ac74 --- /dev/null +++ b/GPUImage/Source/GPUImageAverageColor.m @@ -0,0 +1,389 @@ +#import "GPUImageAverageColor.h" + +NSString *const kGPUImageColorAveragingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform float texelWidth; + uniform float texelHeight; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + void main() + { + gl_Position = position; + + upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight); + upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight); + lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight); + lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight); + } + ); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING +( 
+ precision highp float; + + uniform sampler2D inputImageTexture; + + varying highp vec2 outputTextureCoordinate; + + varying highp vec2 upperLeftInputTextureCoordinate; + varying highp vec2 upperRightInputTextureCoordinate; + varying highp vec2 lowerLeftInputTextureCoordinate; + varying highp vec2 lowerRightInputTextureCoordinate; + + void main() + { + highp vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate); + highp vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate); + highp vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate); + highp vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate); + + gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor); + } +); +#else +NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 outputTextureCoordinate; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + void main() + { + vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate); + vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate); + vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate); + vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate); + + gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor); + } +); +#endif + +@implementation GPUImageAverageColor + +@synthesize colorAverageProcessingFinishedBlock = _colorAverageProcessingFinishedBlock; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageColorAveragingFragmentShaderString])) + { + return nil; + } + + texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"]; + + stageTextures = [[NSMutableArray alloc] init]; + stageFramebuffers = [[NSMutableArray alloc] init]; + stageSizes = [[NSMutableArray alloc] init]; + + __unsafe_unretained GPUImageAverageColor *weakSelf = self; + [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) { + [weakSelf extractAverageColorAtFrameTime:frameTime]; + }]; + + return self; +} + +- (void)dealloc; +{ + if (rawImagePixels != NULL) + { + free(rawImagePixels); + } +} + +#pragma mark - +#pragma mark Manage the output texture + +- (void)initializeOutputTextureIfNeeded; +{ + if (inputTextureSize.width < 1.0) + { + return; + } + + // Create textures for each level + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + NSUInteger numberOfReductionsInX = floor(log(inputTextureSize.width) / log(4.0)); + NSUInteger numberOfReductionsInY = floor(log(inputTextureSize.height) / log(4.0)); +// NSLog(@"Reductions in X: %d, y: %d", numberOfReductionsInX, numberOfReductionsInY); + + NSUInteger reductionsToHitSideLimit = MIN(numberOfReductionsInX, numberOfReductionsInY); +// NSLog(@"Total reductions: %d", reductionsToHitSideLimit); + for (NSUInteger currentReduction = 0; currentReduction < reductionsToHitSideLimit; currentReduction++) + { +// CGSize currentStageSize = 
CGSizeMake(ceil(inputTextureSize.width / pow(4.0, currentReduction + 1.0)), ceil(inputTextureSize.height / pow(4.0, currentReduction + 1.0)));
+ CGSize currentStageSize = CGSizeMake(floor(inputTextureSize.width / pow(4.0, currentReduction + 1.0)), floor(inputTextureSize.height / pow(4.0, currentReduction + 1.0)));
+ if ( (currentStageSize.height < 2.0) || (currentStageSize.width < 2.0) )
+ {
+ // A really small last stage seems to cause significant errors in the average, so I abort and leave the rest to the CPU at this point
+ break;
+// currentStageSize.height = 2.0; // TODO: Rotate the image to account for this case, which causes FBO construction to fail
+ }
+
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+ [stageSizes addObject:[NSValue valueWithCGSize:currentStageSize]];
+#else
+ [stageSizes addObject:[NSValue valueWithSize:NSSizeFromCGSize(currentStageSize)]];
+#endif
+
+ GLuint textureForStage;
+ glGenTextures(1, &textureForStage);
+ glBindTexture(GL_TEXTURE_2D, textureForStage);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT);
+ [stageTextures addObject:[NSNumber numberWithInt:textureForStage]];
+
+// NSLog(@"At reduction: %d size in X: %f, size in Y:%f", currentReduction, currentStageSize.width, currentStageSize.height);
+ }
+ });
+}
+
+- (void)deleteOutputTexture;
+{
+ if ([stageTextures count] == 0)
+ {
+ return;
+ }
+
+ runSynchronouslyOnVideoProcessingQueue(^{
+ [GPUImageContext useImageProcessingContext];
+
+ NSUInteger numberOfStageTextures = [stageTextures count];
+ for (NSUInteger currentStage = 0; currentStage < numberOfStageTextures; currentStage++)
+ {
+ GLuint currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];
+ glDeleteTextures(1, &currentTexture);
+ }
+
+ [stageTextures removeAllObjects];
+ [stageSizes removeAllObjects];
+ });
+}
+
+#pragma mark -
+#pragma mark Managing the display FBOs
+
+- (void)recreateFilterFBO
+{
+ cachedMaximumOutputSize = CGSizeZero;
+ [self destroyFilterFBO];
+ [self deleteOutputTexture];
+ [self initializeOutputTextureIfNeeded];
+
+ [self setFilterFBO];
+}
+
+- (void)createFilterFBOofSize:(CGSize)currentFBOSize;
+{
+ // Create framebuffers for each level
+ runSynchronouslyOnVideoProcessingQueue(^{
+ [GPUImageContext useImageProcessingContext];
+ glActiveTexture(GL_TEXTURE1);
+
+ NSUInteger numberOfStageFramebuffers = [stageTextures count];
+ for (NSUInteger currentStage = 0; currentStage < numberOfStageFramebuffers; currentStage++)
+ {
+ GLuint currentFramebuffer;
+ glGenFramebuffers(1, &currentFramebuffer);
+ glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);
+ [stageFramebuffers addObject:[NSNumber numberWithInt:currentFramebuffer]];
+
+ GLuint currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];
+ glBindTexture(GL_TEXTURE_2D, currentTexture);
+
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+ CGSize currentFramebufferSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue];
+#else
+ NSSize currentFramebufferSize = [[stageSizes objectAtIndex:currentStage] sizeValue];
+#endif
+
+// NSLog(@"FBO stage size: %f, %f", currentFramebufferSize.width, currentFramebufferSize.height);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ self.outputTextureOptions.internalFormat,
+ (int)currentFramebufferSize.width,
(int)currentFramebufferSize.height,
+ 0,
+ self.outputTextureOptions.format,
+ self.outputTextureOptions.type,
+ 0);
+ glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, currentTexture, 0);
+ GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+
+ NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
+ }
+ });
+
+// [self notifyTargetsAboutNewOutputTexture];
+}
+
+- (void)destroyFilterFBO;
+{
+ runSynchronouslyOnVideoProcessingQueue(^{
+ [GPUImageContext useImageProcessingContext];
+
+ NSUInteger numberOfStageFramebuffers = [stageFramebuffers count];
+ for (NSUInteger currentStage = 0; currentStage < numberOfStageFramebuffers; currentStage++)
+ {
+ GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue];
+ glDeleteFramebuffers(1, &currentFramebuffer);
+ }
+
+ [stageFramebuffers removeAllObjects];
+ });
+}
+
+- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
+{
+ if (self.preventRendering)
+ {
+ return;
+ }
+
+ [GPUImageContext setActiveShaderProgram:filterProgram];
+
+ glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
+ glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
+
+ GLuint currentTexture = sourceTexture;
+
+ NSUInteger numberOfStageFramebuffers = [stageFramebuffers count];
+ for (NSUInteger currentStage = 0; currentStage < numberOfStageFramebuffers; currentStage++)
+ {
+ GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue];
+ glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);
+
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+ CGSize currentStageSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue];
+#else
+ NSSize currentStageSize = [[stageSizes objectAtIndex:currentStage] sizeValue];
+#endif
+ glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);
+
+ glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ glActiveTexture(GL_TEXTURE2);
+ glBindTexture(GL_TEXTURE_2D, currentTexture);
+
+ glUniform1i(filterInputTextureUniform, 2);
+
+ glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);
+ glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);
+
+ glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+
+ currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];
+
+// NSUInteger totalBytesForImage = (int)currentStageSize.width * (int)currentStageSize.height * 4;
+// GLubyte *rawImagePixels2 = (GLubyte *)malloc(totalBytesForImage);
+// glReadPixels(0, 0, (int)currentStageSize.width, (int)currentStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels2);
+// CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels2, totalBytesForImage, NULL);
+// CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
+//
+// CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;
+// NSUInteger totalNumberOfPixels = totalBytesForImage / 4;
+//
+// for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
+// {
+// currentRedTotal += (CGFloat)rawImagePixels2[(currentPixel * 4)] / 255.0f;
+// currentGreenTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 1] / 255.0f;
+// currentBlueTotal += (CGFloat)rawImagePixels2[(currentPixel * 4 + 2)] / 255.0f;
+// currentAlphaTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 3] /
255.0f; +// } +// +// NSLog(@"Stage %d average image red: %f, green: %f, blue: %f, alpha: %f", currentStage, currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels); +// +// +// CGImageRef cgImageFromBytes = CGImageCreate((int)currentStageSize.width, (int)currentStageSize.height, 8, 32, 4 * (int)currentStageSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault); +// +// UIImage *imageToSave = [UIImage imageWithCGImage:cgImageFromBytes]; +// +// NSData *dataForPNGFile = UIImagePNGRepresentation(imageToSave); +// +// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); +// NSString *documentsDirectory = [paths objectAtIndex:0]; +// +// NSString *imageName = [NSString stringWithFormat:@"AverageLevel%d.png", currentStage]; +// NSError *error = nil; +// if (![dataForPNGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:imageName] options:NSAtomicWrite error:&error]) +// { +// return; +// } + } +} + +- (void)prepareForImageCapture; +{ + preparedToCaptureImage = YES; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = kGPUImageNoRotation; +} + +- (void)extractAverageColorAtFrameTime:(CMTime)frameTime; +{ + // we need a normal color texture for averaging the color values + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture internal format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGSize finalStageSize = [[stageSizes lastObject] CGSizeValue]; +#else + NSSize finalStageSize = [[stageSizes lastObject] sizeValue]; +#endif + NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height); + + if (rawImagePixels == NULL) + { + rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4); + } + + glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + + NSUInteger redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0; + NSUInteger byteIndex = 0; + for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++) + { + redTotal += rawImagePixels[byteIndex++]; + greenTotal += rawImagePixels[byteIndex++]; + blueTotal += rawImagePixels[byteIndex++]; + alphaTotal += rawImagePixels[byteIndex++]; + } + + CGFloat normalizedRedTotal = (CGFloat)redTotal / (CGFloat)totalNumberOfPixels / 255.0; + CGFloat normalizedGreenTotal = (CGFloat)greenTotal / (CGFloat)totalNumberOfPixels / 255.0; + CGFloat normalizedBlueTotal = (CGFloat)blueTotal / (CGFloat)totalNumberOfPixels / 255.0; + CGFloat normalizedAlphaTotal = (CGFloat)alphaTotal / (CGFloat)totalNumberOfPixels / 255.0; + + if (_colorAverageProcessingFinishedBlock != NULL) + { + _colorAverageProcessingFinishedBlock(normalizedRedTotal, normalizedGreenTotal, normalizedBlueTotal, normalizedAlphaTotal, frameTime); + } +} + +@end diff --git a/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.h b/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.h new file mode 100644 index 0000000..7f1ae46 --- /dev/null +++ b/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.h @@ -0,0 +1,8 @@ +#import 
"GPUImageFilterGroup.h" + +@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup + +// This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0. +@property(readwrite, nonatomic) CGFloat thresholdMultiplier; + +@end diff --git a/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.m b/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.m new file mode 100644 index 0000000..eb2796f --- /dev/null +++ b/GPUImage/Source/GPUImageAverageLuminanceThresholdFilter.m @@ -0,0 +1,47 @@ +#import "GPUImageAverageLuminanceThresholdFilter.h" +#import "GPUImageLuminosity.h" +#import "GPUImageLuminanceThresholdFilter.h" + +@interface GPUImageAverageLuminanceThresholdFilter() +{ + GPUImageLuminosity *luminosityFilter; + GPUImageLuminanceThresholdFilter *luminanceThresholdFilter; +} +@end + +@implementation GPUImageAverageLuminanceThresholdFilter + +@synthesize thresholdMultiplier = _thresholdMultiplier; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + self.thresholdMultiplier = 1.0; + + luminosityFilter = [[GPUImageLuminosity alloc] init]; + [self addFilter:luminosityFilter]; + + luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init]; + [self addFilter:luminanceThresholdFilter]; + + __unsafe_unretained GPUImageAverageLuminanceThresholdFilter *weakSelf = self; + __unsafe_unretained GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter; + + [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) { + weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier; + }]; + + self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil]; + self.terminalFilter = luminanceThresholdFilter; + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageBilateralFilter.h b/GPUImage/Source/GPUImageBilateralFilter.h new file mode 100644 index 0000000..6b736cc --- /dev/null +++ b/GPUImage/Source/GPUImageBilateralFilter.h @@ -0,0 +1,10 @@ +#import "GPUImageGaussianBlurFilter.h" + +@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter +{ + CGFloat firstDistanceNormalizationFactorUniform; + CGFloat secondDistanceNormalizationFactorUniform; +} +// A normalization factor for the distance between central color and sample color. 
+@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor; +@end diff --git a/GPUImage/Source/GPUImageBilateralFilter.m b/GPUImage/Source/GPUImageBilateralFilter.m new file mode 100644 index 0000000..c2a8c86 --- /dev/null +++ b/GPUImage/Source/GPUImageBilateralFilter.m @@ -0,0 +1,231 @@ +#import "GPUImageBilateralFilter.h" + +NSString *const kGPUImageBilateralBlurVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + const int GAUSSIAN_SAMPLES = 9; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 textureCoordinate; + varying vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + void main() + { + gl_Position = position; + textureCoordinate = inputTextureCoordinate.xy; + + // Calculate the positions for the blur + int multiplier = 0; + vec2 blurStep; + vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset); + + for (int i = 0; i < GAUSSIAN_SAMPLES; i++) + { + multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2)); + // Blur in x (horizontal) + blurStep = float(multiplier) * singleStepOffset; + blurCoordinates[i] = inputTextureCoordinate.xy + blurStep; + } + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + const lowp int GAUSSIAN_SAMPLES = 9; + + varying highp vec2 textureCoordinate; + varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + uniform mediump float distanceNormalizationFactor; + + void main() + { + lowp vec4 centralColor; + lowp float gaussianWeightTotal; + lowp vec4 sum; + lowp vec4 sampleColor; + lowp float distanceFromCentralColor; + lowp float gaussianWeight; + + centralColor = texture2D(inputImageTexture, blurCoordinates[4]); + gaussianWeightTotal = 0.18; + sum = centralColor * 0.18; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[0]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[1]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[2]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[3]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[5]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[6]); + distanceFromCentralColor = min(distance(centralColor, 
sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[7]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[8]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + gl_FragColor = sum / gaussianWeightTotal; + } +); +#else +NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + const int GAUSSIAN_SAMPLES = 9; + + varying vec2 textureCoordinate; + varying vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + uniform float distanceNormalizationFactor; + + void main() + { + vec4 centralColor; + float gaussianWeightTotal; + vec4 sum; + vec4 sampleColor; + float distanceFromCentralColor; + float gaussianWeight; + + centralColor = texture2D(inputImageTexture, blurCoordinates[4]); + gaussianWeightTotal = 0.18; + sum = centralColor * 0.18; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[0]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[1]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[2]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[3]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[5]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[6]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[7]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.09 * (1.0 - 
distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + sampleColor = texture2D(inputImageTexture, blurCoordinates[8]); + distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0); + gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor); + gaussianWeightTotal += gaussianWeight; + sum += sampleColor * gaussianWeight; + + gl_FragColor = sum / gaussianWeightTotal; + } +); +#endif + +@implementation GPUImageBilateralFilter + +@synthesize distanceNormalizationFactor = _distanceNormalizationFactor; + +- (id)init; +{ + + if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString + firstStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString + secondStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString + secondStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString])) { + return nil; + } + + firstDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@"distanceNormalizationFactor"]; + secondDistanceNormalizationFactorUniform = [secondFilterProgram uniformIndex:@"distanceNormalizationFactor"]; + + self.texelSpacingMultiplier = 4.0; + self.distanceNormalizationFactor = 8.0; + + + return self; +} + + +#pragma mark - +#pragma mark Accessors + +- (void)setDistanceNormalizationFactor:(CGFloat)newValue +{ + _distanceNormalizationFactor = newValue; + + [self setFloat:newValue + forUniform:firstDistanceNormalizationFactorUniform + program:filterProgram]; + + [self setFloat:newValue + forUniform:secondDistanceNormalizationFactorUniform + program:secondFilterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageBoxBlurFilter.h b/GPUImage/Source/GPUImageBoxBlurFilter.h new file mode 100755 index 0000000..3fd880b --- /dev/null +++ b/GPUImage/Source/GPUImageBoxBlurFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageGaussianBlurFilter.h" + +/** A hardware-accelerated box blur of an image + */ +@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter + +@end diff --git a/GPUImage/Source/GPUImageBoxBlurFilter.m b/GPUImage/Source/GPUImageBoxBlurFilter.m new file mode 100755 index 0000000..2bae66e --- /dev/null +++ b/GPUImage/Source/GPUImageBoxBlurFilter.m @@ -0,0 +1,178 @@ +#import "GPUImageBoxBlurFilter.h" + + +@implementation GPUImageBoxBlurFilter + ++ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius == 0) + { + return nil; + } + + // From these weights we calculate the offsets to read interpolated values from + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + // Header + [shaderString appendFormat:@"\ attribute vec4 position;\n\ attribute vec4 inputTextureCoordinate;\n\ \n\ uniform float texelWidthOffset;\n\ uniform float texelHeightOffset;\n\ \n\ varying vec2 blurCoordinates[%lu];\n\ \n\ void main()\n\ {\n\ gl_Position = position;\n\ \n\ vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))]; + + // Inner offset loop + [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"]; + for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++) + { + GLfloat optimizedOffset = (GLfloat)(currentOptimizedOffset * 2) + 1.5; + + [shaderString appendFormat:@"\ blurCoordinates[%lu] =
inputTextureCoordinate.xy + singleStepOffset * %f;\n\ blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedOffset, (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedOffset]; + } + + // Footer + [shaderString appendString:@"}\n"]; + + return shaderString; +} + ++ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImagePassthroughFragmentShaderString; + } + + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2); + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + + // Header +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendFormat:@"\ uniform sampler2D inputImageTexture;\n\ uniform highp float texelWidthOffset;\n\ uniform highp float texelHeightOffset;\n\ \n\ varying highp vec2 blurCoordinates[%lu];\n\ \n\ void main()\n\ {\n\ lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ]; +#else + [shaderString appendFormat:@"\ uniform sampler2D inputImageTexture;\n\ uniform float texelWidthOffset;\n\ uniform float texelHeightOffset;\n\ \n\ varying vec2 blurCoordinates[%lu];\n\ \n\ void main()\n\ {\n\ vec4 sum = vec4(0.0);\n", 1 + (numberOfOptimizedOffsets * 2) ]; +#endif + + GLfloat boxWeight = 1.0 / (GLfloat)((blurRadius * 2) + 1); + + // Inner texture loop + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", boxWeight]; + + for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++) + { + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), boxWeight * 2.0]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), boxWeight * 2.0]; + } + + // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader + if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#else + [shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#endif + + for (NSUInteger currentOverflowTextureRead = numberOfOptimizedOffsets; currentOverflowTextureRead < trueNumberOfOptimizedOffsets; currentOverflowTextureRead++) + { + GLfloat optimizedOffset = (GLfloat)(currentOverflowTextureRead * 2) + 1.5; + + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0]; + } + } + + // Footer + [shaderString appendString:@"\ gl_FragColor = sum;\n\ }\n"]; + + return shaderString; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + [super setupFilterForSize:filterFrameSize]; + + if (shouldResizeBlurRadiusWithImageSize == YES) + { + + } +} + +#pragma mark - +#pragma mark Initialization and teardown
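+
+// For the default radius of 4 used in -init below, the generated shaders read five
+// samples per pass: one center tap plus interpolated taps at +/-1.5 and +/-3.5 texels,
+// each side tap covering two texels through hardware linear filtering. Reconstructed
+// from the generators above, the fragment shader body is approximately (a comment-only
+// sketch; the real string is assembled at runtime):
+//
+//   lowp vec4 sum = vec4(0.0);
+//   sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.111111; // 1/9
+//   sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.222222; // 2/9 each
+//   sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.222222;
+//   sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.222222;
+//   sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.222222;
+//   gl_FragColor = sum;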
+ +- (id)init; +{ + // NSString *currentGaussianBlurVertexShader = [GPUImageGaussianBlurFilter vertexShaderForStandardGaussianOfRadius:4 sigma:2.0]; + // NSString *currentGaussianBlurFragmentShader = [GPUImageGaussianBlurFilter fragmentShaderForStandardGaussianOfRadius:4 sigma:2.0]; + + NSString *currentBoxBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:0.0]; + NSString *currentBoxBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:0.0]; + + if (!(self = [super initWithFirstStageVertexShaderFromString:currentBoxBlurVertexShader firstStageFragmentShaderFromString:currentBoxBlurFragmentShader secondStageVertexShaderFromString:currentBoxBlurVertexShader secondStageFragmentShaderFromString:currentBoxBlurFragmentShader])) + { + return nil; + } + + _blurRadiusInPixels = 4.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + CGFloat newBlurRadius = round(round(newValue / 2.0) * 2.0); // For now, only do even radii + + if (newBlurRadius != _blurRadiusInPixels) + { + _blurRadiusInPixels = newBlurRadius; + + NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0]; + NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0]; + + // NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader); + // NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader); + // + [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader]; + } + shouldResizeBlurRadiusWithImageSize = NO; +} + +@end + diff --git a/GPUImage/Source/GPUImageBrightnessFilter.h b/GPUImage/Source/GPUImageBrightnessFilter.h new file mode 100755 index 0000000..046473b --- /dev/null +++ b/GPUImage/Source/GPUImageBrightnessFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageFilter.h" + +@interface GPUImageBrightnessFilter : GPUImageFilter +{ + GLint brightnessUniform; +} + +// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level +@property(readwrite, nonatomic) CGFloat brightness; + +@end diff --git a/GPUImage/Source/GPUImageBrightnessFilter.m b/GPUImage/Source/GPUImageBrightnessFilter.m new file mode 100755 index 0000000..7e526d8 --- /dev/null +++ b/GPUImage/Source/GPUImageBrightnessFilter.m @@ -0,0 +1,66 @@ +#import "GPUImageBrightnessFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float brightness; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w); + } +); +#else +NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float brightness; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w); + } + ); +#endif + +@implementation GPUImageBrightnessFilter + +@synthesize brightness = _brightness; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString])) + { + return nil; + } + + 
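+    // Cache the uniform location once at initialization; -setBrightness: reuses this
+    // index on every update instead of querying the program again.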
brightnessUniform = [filterProgram uniformIndex:@"brightness"]; + self.brightness = 0.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBrightness:(CGFloat)newValue; +{ + _brightness = newValue; + + [self setFloat:_brightness forUniform:brightnessUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageBuffer.h b/GPUImage/Source/GPUImageBuffer.h new file mode 100644 index 0000000..8a9cd05 --- /dev/null +++ b/GPUImage/Source/GPUImageBuffer.h @@ -0,0 +1,10 @@ +#import "GPUImageFilter.h" + +@interface GPUImageBuffer : GPUImageFilter +{ + NSMutableArray *bufferedTextures; +} + +@property(readwrite, nonatomic) NSUInteger bufferSize; + +@end diff --git a/GPUImage/Source/GPUImageBuffer.m b/GPUImage/Source/GPUImageBuffer.m new file mode 100644 index 0000000..84b9d93 --- /dev/null +++ b/GPUImage/Source/GPUImageBuffer.m @@ -0,0 +1,194 @@ +#import "GPUImageBuffer.h" + +@interface GPUImageBuffer() + +//Texture management +- (GLuint)generateTexture; +- (void)removeTexture:(GLuint)textureToRemove; + +@end + +@implementation GPUImageBuffer + +@synthesize bufferSize = _bufferSize; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString])) + { + return nil; + } + + bufferedTextures = [[NSMutableArray alloc] init]; + [self initializeOutputTextureIfNeeded]; + [bufferedTextures addObject:[NSNumber numberWithInt:outputTexture]]; + _bufferSize = 1; + + return self; +} + +- (void)dealloc +{ + for (NSNumber *currentTextureName in bufferedTextures) + { + [self removeTexture:[currentTextureName intValue]]; + } +} + +#pragma mark - +#pragma mark GPUImageInput + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + static const GLfloat imageVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + [self notifyTargetsAboutNewOutputTexture]; + + // Let the downstream video elements see the previous frame from the buffer before rendering a new one into place + [self informTargetsAboutNewFrameAtTime:frameTime]; + + // Move the last frame to the back of the buffer, if needed + if (_bufferSize > 1) + { + NSNumber *lastTextureName = [bufferedTextures objectAtIndex:0]; + [bufferedTextures removeObjectAtIndex:0]; + [bufferedTextures addObject:lastTextureName]; + } + else + { + // Make sure the previous rendering has finished before enqueuing the current frame when simply delaying by one frame + glFinish(); + } + + // Render the new frame to the back of the buffer + [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + [GPUImageContext setActiveShaderProgram:filterProgram]; + [self setFilterFBO]; + + glBindTexture(GL_TEXTURE_2D, [[bufferedTextures lastObject] intValue]); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, [[bufferedTextures lastObject] intValue], 0); + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + + 
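+    // The source texture was just bound to texture unit 2 (GL_TEXTURE2) above, so the
+    // sampler uniform must point at unit index 2 before the draw call.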
glUniform1i(filterInputTextureUniform, 2); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)prepareForImageCapture; +{ + // Disable this for now, until I figure out how to integrate the texture caches with a buffer like this +} + +#pragma mark - +#pragma mark Managing targets + +- (GLuint)textureForOutput; +{ + return [[bufferedTextures objectAtIndex:0] intValue]; +} + +#pragma mark - +#pragma mark Texture management + +- (GLuint)generateTexture; +{ + __block GLuint newTextureName = 0; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + glActiveTexture(GL_TEXTURE0); + glGenTextures(1, &newTextureName); + glBindTexture(GL_TEXTURE_2D, newTextureName); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + // This is necessary for non-power-of-two textures + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + + CGSize currentFBOSize = [self sizeOfFBO]; + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + (int)currentFBOSize.width, + (int)currentFBOSize.height, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, + 0); + glBindTexture(GL_TEXTURE_2D, 0); + + }); + + return newTextureName; +} + +- (void)removeTexture:(GLuint)textureToRemove; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + glDeleteTextures(1, &textureToRemove); + }); +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBufferSize:(NSUInteger)newValue; +{ + if ( (newValue == _bufferSize) || (newValue < 1) ) + { + return; + } + + if (newValue > _bufferSize) + { + NSUInteger texturesToAdd = newValue - _bufferSize; + for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToAdd; currentTextureIndex++) + { + [bufferedTextures addObject:[NSNumber numberWithInt:[self generateTexture]]]; + } + } + else + { + NSUInteger texturesToRemove = _bufferSize - newValue; + for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToRemove; currentTextureIndex++) + { + NSNumber *lastTextureName = [bufferedTextures lastObject]; + [bufferedTextures removeObjectAtIndex:([bufferedTextures count] - 1)]; + [self removeTexture:[lastTextureName intValue]]; + } + } + + _bufferSize = newValue; +} + +@end diff --git a/GPUImage/Source/GPUImageBulgeDistortionFilter.h b/GPUImage/Source/GPUImageBulgeDistortionFilter.h new file mode 100755 index 0000000..d416e53 --- /dev/null +++ b/GPUImage/Source/GPUImageBulgeDistortionFilter.h @@ -0,0 +1,16 @@ +#import "GPUImageFilter.h" + +/// Creates a bulge distortion on the image +@interface GPUImageBulgeDistortionFilter : GPUImageFilter +{ + GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform; +} + +/// The center about which to apply the distortion, with a default of (0.5, 0.5) +@property(readwrite, nonatomic) CGPoint center; +/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25 +@property(readwrite, nonatomic) CGFloat radius; +/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5 +@property(readwrite, nonatomic) CGFloat scale; + +@end diff 
--git a/GPUImage/Source/GPUImageBulgeDistortionFilter.m b/GPUImage/Source/GPUImageBulgeDistortionFilter.m new file mode 100755 index 0000000..1f778ad --- /dev/null +++ b/GPUImage/Source/GPUImageBulgeDistortionFilter.m @@ -0,0 +1,174 @@ +#import "GPUImageBulgeDistortionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float aspectRatio; + uniform highp vec2 center; + uniform highp float radius; + uniform highp float scale; + + void main() + { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float dist = distance(center, textureCoordinateToUse); + textureCoordinateToUse = textureCoordinate; + + if (dist < radius) + { + textureCoordinateToUse -= center; + highp float percent = 1.0 - ((radius - dist) / radius) * scale; + percent = percent * percent; + + textureCoordinateToUse = textureCoordinateToUse * percent; + textureCoordinateToUse += center; + } + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + } +); +#else +NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float aspectRatio; + uniform vec2 center; + uniform float radius; + uniform float scale; + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float dist = distance(center, textureCoordinateToUse); + textureCoordinateToUse = textureCoordinate; + + if (dist < radius) + { + textureCoordinateToUse -= center; + float percent = 1.0 - ((radius - dist) / radius) * scale; + percent = percent * percent; + + textureCoordinateToUse = textureCoordinateToUse * percent; + textureCoordinateToUse += center; + } + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + } +); +#endif + + +@interface GPUImageBulgeDistortionFilter () + +- (void)adjustAspectRatio; + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end + +@implementation GPUImageBulgeDistortionFilter + +@synthesize aspectRatio = _aspectRatio; +@synthesize center = _center; +@synthesize radius = _radius; +@synthesize scale = _scale; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageBulgeDistortionFragmentShaderString])) + { + return nil; + } + + aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"]; + radiusUniform = [filterProgram uniformIndex:@"radius"]; + scaleUniform = [filterProgram uniformIndex:@"scale"]; + centerUniform = [filterProgram uniformIndex:@"center"]; + + self.radius = 0.25; + self.scale = 0.5; + self.center = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize 
atIndex:textureIndex]; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram]; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; + [self adjustAspectRatio]; +} + +- (void)setRadius:(CGFloat)newValue; +{ + _radius = newValue; + + [self setFloat:_radius forUniform:radiusUniform program:filterProgram]; +} + +- (void)setScale:(CGFloat)newValue; +{ + _scale = newValue; + + [self setFloat:_scale forUniform:scaleUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageCGAColorspaceFilter.h b/GPUImage/Source/GPUImageCGAColorspaceFilter.h new file mode 100755 index 0000000..4f97804 --- /dev/null +++ b/GPUImage/Source/GPUImageCGAColorspaceFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageFilter.h" + +@interface GPUImageCGAColorspaceFilter : GPUImageFilter + +@end diff --git a/GPUImage/Source/GPUImageCGAColorspaceFilter.m b/GPUImage/Source/GPUImageCGAColorspaceFilter.m new file mode 100755 index 0000000..eee939a --- /dev/null +++ b/GPUImage/Source/GPUImageCGAColorspaceFilter.m @@ -0,0 +1,113 @@ +// +// GPUImageCGAColorspaceFilter.m +// + +#import "GPUImageCGAColorspaceFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0); + //highp vec4 colorDivisor = vec4(colorDepth); + + highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor); + highp vec4 color = texture2D(inputImageTexture, samplePos ); + + //gl_FragColor = texture2D(inputImageTexture, samplePos ); + mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0); + mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0); + mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0); + mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0); + + mediump vec4 endColor; + highp float blackDistance = distance(color, colorBlack); + highp float whiteDistance = distance(color, colorWhite); + highp float magentaDistance = distance(color, colorMagenta); + highp float cyanDistance = distance(color, colorCyan); + + mediump vec4 finalColor; + + highp float colorDistance = min(magentaDistance, cyanDistance); + colorDistance = min(colorDistance, whiteDistance); + colorDistance = min(colorDistance, blackDistance); + + if (colorDistance == blackDistance) { + finalColor = colorBlack; + } else if (colorDistance == whiteDistance) { + finalColor = colorWhite; + } else if (colorDistance == cyanDistance) { + finalColor = colorCyan; + } else { + finalColor = colorMagenta; + } + + gl_FragColor = finalColor; + } +); +#else +NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0); + //highp vec4 colorDivisor 
= vec4(colorDepth); + + vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor); + vec4 color = texture2D(inputImageTexture, samplePos ); + + //gl_FragColor = texture2D(inputImageTexture, samplePos ); + vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0); + vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0); + vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0); + vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0); + + vec4 endColor; + float blackDistance = distance(color, colorBlack); + float whiteDistance = distance(color, colorWhite); + float magentaDistance = distance(color, colorMagenta); + float cyanDistance = distance(color, colorCyan); + + vec4 finalColor; + + float colorDistance = min(magentaDistance, cyanDistance); + colorDistance = min(colorDistance, whiteDistance); + colorDistance = min(colorDistance, blackDistance); + + if (colorDistance == blackDistance) { + finalColor = colorBlack; + } else if (colorDistance == whiteDistance) { + finalColor = colorWhite; + } else if (colorDistance == cyanDistance) { + finalColor = colorCyan; + } else { + finalColor = colorMagenta; + } + + gl_FragColor = finalColor; + } +); +#endif + +@implementation GPUImageCGAColorspaceFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageCGAColorspaceFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.h b/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.h new file mode 100755 index 0000000..e01a643 --- /dev/null +++ b/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.h @@ -0,0 +1,62 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGrayscaleFilter; +@class GPUImageSingleComponentGaussianBlurFilter; +@class GPUimageDirectionalSobelEdgeDetectionFilter; +@class GPUImageDirectionalNonMaximumSuppressionFilter; +@class GPUImageWeakPixelInclusionFilter; + +/** This applies the edge detection process described by John Canny in + + Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679–698, 1986. + + and implemented in OpenGL ES by + + A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011. + + It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall + gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter + acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower + threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels. + */ +@interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup +{ + GPUImageGrayscaleFilter *luminanceFilter; + GPUImageSingleComponentGaussianBlurFilter *blurFilter; + GPUimageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter; + GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter; + GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter; +} + +/** The image width and height factors tweak the appearance of the edges. + + These parameters affect the visibility of the detected edges + + By default, they match the inverse of the filter size in pixels + */ +@property(readwrite, nonatomic) CGFloat texelWidth; +/** The image width and height factors tweak the appearance of the edges. 
+ + These parameters affect the visibility of the detected edges + + By default, they match the inverse of the filter size in pixels + */ +@property(readwrite, nonatomic) CGFloat texelHeight; + +/** The underlying blur radius for the Gaussian blur. Default is 2.0. + */ +@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; + +/** The underlying blur texel spacing multiplier. Default is 1.0. + */ +@property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier; + +/** Any edge with a gradient magnitude above this threshold will pass and show up in the final result. + */ +@property(readwrite, nonatomic) CGFloat upperThreshold; + +/** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result. + */ +@property(readwrite, nonatomic) CGFloat lowerThreshold; + +@end diff --git a/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.m b/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.m new file mode 100755 index 0000000..b566e77 --- /dev/null +++ b/GPUImage/Source/GPUImageCannyEdgeDetectionFilter.m @@ -0,0 +1,125 @@ +#import "GPUImageCannyEdgeDetectionFilter.h" + +#import "GPUImageGrayscaleFilter.h" +#import "GPUImageDirectionalSobelEdgeDetectionFilter.h" +#import "GPUImageDirectionalNonMaximumSuppressionFilter.h" +#import "GPUImageWeakPixelInclusionFilter.h" +#import "GPUImageSingleComponentGaussianBlurFilter.h" + +@implementation GPUImageCannyEdgeDetectionFilter + +@synthesize upperThreshold; +@synthesize lowerThreshold; +@synthesize blurRadiusInPixels; +@synthesize blurTexelSpacingMultiplier; +@synthesize texelWidth; +@synthesize texelHeight; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: convert image to luminance + luminanceFilter = [[GPUImageGrayscaleFilter alloc] init]; + [self addFilter:luminanceFilter]; + + // Second pass: apply a variable Gaussian blur + blurFilter = [[GPUImageSingleComponentGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + + // Third pass: run the Sobel edge detection, with calculated gradient directions, on this blurred image + edgeDetectionFilter = [[GPUimageDirectionalSobelEdgeDetectionFilter alloc] init]; + [self addFilter:edgeDetectionFilter]; + + // Fourth pass: apply non-maximum suppression + nonMaximumSuppressionFilter = [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init]; + [self addFilter:nonMaximumSuppressionFilter]; + + // Fifth pass: include weak pixels to complete edges + weakPixelInclusionFilter = [[GPUImageWeakPixelInclusionFilter alloc] init]; + [self addFilter:weakPixelInclusionFilter]; + + [luminanceFilter addTarget:blurFilter]; + [blurFilter addTarget:edgeDetectionFilter]; + [edgeDetectionFilter addTarget:nonMaximumSuppressionFilter]; + [nonMaximumSuppressionFilter addTarget:weakPixelInclusionFilter]; + + self.initialFilters = [NSArray arrayWithObject:luminanceFilter]; +// self.terminalFilter = nonMaximumSuppressionFilter; + self.terminalFilter = weakPixelInclusionFilter; + + self.blurRadiusInPixels = 2.0; + self.blurTexelSpacingMultiplier = 1.0; + self.upperThreshold = 0.4; + self.lowerThreshold = 0.1; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setBlurTexelSpacingMultiplier:(CGFloat)newValue; +{ + blurFilter.texelSpacingMultiplier = newValue; +} + +- (CGFloat)blurTexelSpacingMultiplier; +{ + return 
blurFilter.texelSpacingMultiplier; +} + +- (void)setTexelWidth:(CGFloat)newValue; +{ + edgeDetectionFilter.texelWidth = newValue; +} + +- (CGFloat)texelWidth; +{ + return edgeDetectionFilter.texelWidth; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + edgeDetectionFilter.texelHeight = newValue; +} + +- (CGFloat)texelHeight; +{ + return edgeDetectionFilter.texelHeight; +} + +- (void)setUpperThreshold:(CGFloat)newValue; +{ + nonMaximumSuppressionFilter.upperThreshold = newValue; +} + +- (CGFloat)upperThreshold; +{ + return nonMaximumSuppressionFilter.upperThreshold; +} + +- (void)setLowerThreshold:(CGFloat)newValue; +{ + nonMaximumSuppressionFilter.lowerThreshold = newValue; +} + +- (CGFloat)lowerThreshold; +{ + return nonMaximumSuppressionFilter.lowerThreshold; +} + +@end diff --git a/GPUImage/Source/GPUImageChromaKeyBlendFilter.h b/GPUImage/Source/GPUImageChromaKeyBlendFilter.h new file mode 100755 index 0000000..47f5acb --- /dev/null +++ b/GPUImage/Source/GPUImageChromaKeyBlendFilter.h @@ -0,0 +1,32 @@ +#import "GPUImageTwoInputFilter.h" + +/** Selectively replaces a color in the first image with the second image + */ +@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter +{ + GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform; +} + +/** The threshold sensitivity controls how similar pixels need to be colored to be replaced + + The default value is 0.3 + */ +@property(readwrite, nonatomic) GLfloat thresholdSensitivity; + +/** The degree of smoothing controls how gradually similar colors are replaced in the image + + The default value is 0.1 + */ +@property(readwrite, nonatomic) GLfloat smoothing; + +/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0). + + The default is green: (0.0, 1.0, 0.0). 
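+ 
+ For example, to key on a blue screen instead, call [filter setColorToReplaceRed:0.0 green:0.0 blue:1.0], where filter is an instance of this class (illustrative values).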
+ + @param redComponent Red component of color to be replaced + @param greenComponent Green component of color to be replaced + @param blueComponent Blue component of color to be replaced + */ +- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +@end diff --git a/GPUImage/Source/GPUImageChromaKeyBlendFilter.m b/GPUImage/Source/GPUImageChromaKeyBlendFilter.m new file mode 100755 index 0000000..d6b2d72 --- /dev/null +++ b/GPUImage/Source/GPUImageChromaKeyBlendFilter.m @@ -0,0 +1,117 @@ +#import "GPUImageChromaKeyBlendFilter.h" + +// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform float thresholdSensitivity; + uniform float smoothing; + uniform vec3 colorToReplace; + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b; + float maskCr = 0.7132 * (colorToReplace.r - maskY); + float maskCb = 0.5647 * (colorToReplace.b - maskY); + + float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b; + float Cr = 0.7132 * (textureColor.r - Y); + float Cb = 0.5647 * (textureColor.b - Y); + +// float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb)); + float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb))); + gl_FragColor = mix(textureColor, textureColor2, blendValue); + } +); +#else +NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform float thresholdSensitivity; + uniform float smoothing; + uniform vec3 colorToReplace; + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b; + float maskCr = 0.7132 * (colorToReplace.r - maskY); + float maskCb = 0.5647 * (colorToReplace.b - maskY); + + float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b; + float Cr = 0.7132 * (textureColor.r - Y); + float Cb = 0.5647 * (textureColor.b - Y); + + // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb)); + float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb))); + gl_FragColor = mix(textureColor, textureColor2, blendValue); + } +); +#endif + +@implementation GPUImageChromaKeyBlendFilter + +@synthesize thresholdSensitivity = _thresholdSensitivity; +@synthesize smoothing = _smoothing; + +- (id)init; +{ + if (!(self = [super 
initWithFragmentShaderFromString:kGPUImageChromaKeyBlendFragmentShaderString])) + { + return nil; + } + + thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"]; + smoothingUniform = [filterProgram uniformIndex:@"smoothing"]; + colorToReplaceUniform = [filterProgram uniformIndex:@"colorToReplace"]; + + self.thresholdSensitivity = 0.4; + self.smoothing = 0.1; + [self setColorToReplaceRed:0.0 green:1.0 blue:0.0]; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent}; + + [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram]; +} + +- (void)setThresholdSensitivity:(GLfloat)newValue; +{ + _thresholdSensitivity = newValue; + + [self setFloat:_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram]; +} + +- (void)setSmoothing:(GLfloat)newValue; +{ + _smoothing = newValue; + + [self setFloat:_smoothing forUniform:smoothingUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageChromaKeyFilter.h b/GPUImage/Source/GPUImageChromaKeyFilter.h new file mode 100644 index 0000000..a3d073c --- /dev/null +++ b/GPUImage/Source/GPUImageChromaKeyFilter.h @@ -0,0 +1,30 @@ +#import "GPUImageFilter.h" + +@interface GPUImageChromaKeyFilter : GPUImageFilter +{ + GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform; +} + +/** The threshold sensitivity controls how similar pixels need to be colored to be replaced + + The default value is 0.3 + */ +@property(readwrite, nonatomic) GLfloat thresholdSensitivity; + +/** The degree of smoothing controls how gradually similar colors are replaced in the image + + The default value is 0.1 + */ +@property(readwrite, nonatomic) GLfloat smoothing; + +/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0). + + The default is green: (0.0, 1.0, 0.0). 
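+ 
+ Unlike GPUImageChromaKeyBlendFilter, this single-input variant does not composite a second image; its shader scales the alpha of pixels near the key color toward zero, leaving keyed regions transparent.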
+ + @param redComponent Red component of color to be replaced + @param greenComponent Green component of color to be replaced + @param blueComponent Blue component of color to be replaced + */ +- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +@end diff --git a/GPUImage/Source/GPUImageChromaKeyFilter.m b/GPUImage/Source/GPUImageChromaKeyFilter.m new file mode 100644 index 0000000..7ee0d67 --- /dev/null +++ b/GPUImage/Source/GPUImageChromaKeyFilter.m @@ -0,0 +1,113 @@ +#import "GPUImageChromaKeyFilter.h" + +// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying highp vec2 textureCoordinate; + + uniform float thresholdSensitivity; + uniform float smoothing; + uniform vec3 colorToReplace; + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b; + float maskCr = 0.7132 * (colorToReplace.r - maskY); + float maskCb = 0.5647 * (colorToReplace.b - maskY); + + float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b; + float Cr = 0.7132 * (textureColor.r - Y); + float Cb = 0.5647 * (textureColor.b - Y); + + // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb)); + float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb))); + gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue); + } +); +#else +NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform float thresholdSensitivity; + uniform float smoothing; + uniform vec3 colorToReplace; + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b; + float maskCr = 0.7132 * (colorToReplace.r - maskY); + float maskCb = 0.5647 * (colorToReplace.b - maskY); + + float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b; + float Cr = 0.7132 * (textureColor.r - Y); + float Cb = 0.5647 * (textureColor.b - Y); + + // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb)); + float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb))); + gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue); + } + ); +#endif + +@implementation GPUImageChromaKeyFilter + +@synthesize thresholdSensitivity = _thresholdSensitivity; +@synthesize smoothing = _smoothing; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyFragmentShaderString])) + { + return nil; + } + + thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"]; + smoothingUniform = [filterProgram uniformIndex:@"smoothing"]; + colorToReplaceUniform = [filterProgram 
uniformIndex:@"colorToReplace"]; + + self.thresholdSensitivity = 0.4; + self.smoothing = 0.1; + [self setColorToReplaceRed:0.0 green:1.0 blue:0.0]; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent}; + + [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram]; +} + +- (void)setThresholdSensitivity:(GLfloat)newValue; +{ + _thresholdSensitivity = newValue; + + [self setFloat:_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram]; +} + +- (void)setSmoothing:(GLfloat)newValue; +{ + _smoothing = newValue; + + [self setFloat:_smoothing forUniform:smoothingUniform program:filterProgram]; +} + + +@end diff --git a/GPUImage/Source/GPUImageClosingFilter.h b/GPUImage/Source/GPUImageClosingFilter.h new file mode 100644 index 0000000..61e34c4 --- /dev/null +++ b/GPUImage/Source/GPUImageClosingFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageErosionFilter; +@class GPUImageDilationFilter; + +// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius. +// This helps to filter out smaller dark elements. + +@interface GPUImageClosingFilter : GPUImageFilterGroup +{ + GPUImageErosionFilter *erosionFilter; + GPUImageDilationFilter *dilationFilter; +} + +@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing; + +- (id)initWithRadius:(NSUInteger)radius; + +@end diff --git a/GPUImage/Source/GPUImageClosingFilter.m b/GPUImage/Source/GPUImageClosingFilter.m new file mode 100644 index 0000000..01e9b29 --- /dev/null +++ b/GPUImage/Source/GPUImageClosingFilter.m @@ -0,0 +1,57 @@ +#import "GPUImageClosingFilter.h" +#import "GPUImageErosionFilter.h" +#import "GPUImageDilationFilter.h" + +@implementation GPUImageClosingFilter + +@synthesize verticalTexelSpacing = _verticalTexelSpacing; +@synthesize horizontalTexelSpacing = _horizontalTexelSpacing; + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +- (id)initWithRadius:(NSUInteger)radius; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: dilation + dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius]; + [self addFilter:dilationFilter]; + + // Second pass: erosion + erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius]; + [self addFilter:erosionFilter]; + + [dilationFilter addTarget:erosionFilter]; + + self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil]; + self.terminalFilter = erosionFilter; + + return self; +} + +- (void)setVerticalTexelSpacing:(CGFloat)newValue; +{ + _verticalTexelSpacing = newValue; + erosionFilter.verticalTexelSpacing = newValue; + dilationFilter.verticalTexelSpacing = newValue; +} + +- (void)setHorizontalTexelSpacing:(CGFloat)newValue; +{ + _horizontalTexelSpacing = newValue; + erosionFilter.horizontalTexelSpacing = newValue; + dilationFilter.horizontalTexelSpacing = newValue; +} + +@end diff --git a/GPUImage/Source/GPUImageColorBlendFilter.h b/GPUImage/Source/GPUImageColorBlendFilter.h new file mode 100644 index 0000000..302a16c --- /dev/null +++ b/GPUImage/Source/GPUImageColorBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter + +@end diff --git 
a/GPUImage/Source/GPUImageColorBlendFilter.m b/GPUImage/Source/GPUImageColorBlendFilter.m new file mode 100644 index 0000000..ced1cb8 --- /dev/null +++ b/GPUImage/Source/GPUImageColorBlendFilter.m @@ -0,0 +1,113 @@ +#import "GPUImageColorBlendFilter.h" + +/** + * Color blend mode based upon pseudo code from the PDF specification. + */ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + highp float lum(lowp vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + lowp vec3 clipcolor(lowp vec3 c) { + highp float l = lum(c); + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + lowp vec3 setlum(lowp vec3 c, highp float l) { + highp float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + void main() + { + highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#else +NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + float lum(vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + vec3 clipcolor(vec3 c) { + float l = lum(c); + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + vec3 setlum(vec3 c, float l) { + float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + void main() + { + vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#endif + + +@implementation GPUImageColorBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageColorBurnBlendFilter.h b/GPUImage/Source/GPUImageColorBurnBlendFilter.h new file mode 100755 index 0000000..50ebb3f --- /dev/null +++ b/GPUImage/Source/GPUImageColorBurnBlendFilter.h @@ -0,0 +1,9 @@ +#import "GPUImageTwoInputFilter.h" + +/** Applies a color burn blend of two images + */ +@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageColorBurnBlendFilter.m b/GPUImage/Source/GPUImageColorBurnBlendFilter.m new file 
mode 100755 index 0000000..5d6ff60 --- /dev/null +++ b/GPUImage/Source/GPUImageColorBurnBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageColorBurnBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + mediump vec4 whiteColor = vec4(1.0); + gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2; + } +); +#else +NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + vec4 whiteColor = vec4(1.0); + gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2; + } +); +#endif + +@implementation GPUImageColorBurnBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageColorDodgeBlendFilter.h b/GPUImage/Source/GPUImageColorDodgeBlendFilter.h new file mode 100755 index 0000000..0f541c4 --- /dev/null +++ b/GPUImage/Source/GPUImageColorDodgeBlendFilter.h @@ -0,0 +1,9 @@ +#import "GPUImageTwoInputFilter.h" + +/** Applies a color dodge blend of two images + */ +@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageColorDodgeBlendFilter.m b/GPUImage/Source/GPUImageColorDodgeBlendFilter.m new file mode 100755 index 0000000..6a38827 --- /dev/null +++ b/GPUImage/Source/GPUImageColorDodgeBlendFilter.m @@ -0,0 +1,75 @@ +#import "GPUImageColorDodgeBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING +( + + precision mediump float; + + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a); + vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a); + + vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct; + vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99); + + vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct; + + vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct); + + gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0); + } +); +#else +NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = 
texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a); + vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a); + + vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct; + vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99); + + vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct; + + vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct); + + gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0); + } +); +#endif + +@implementation GPUImageColorDodgeBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorDodgeBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageColorInvertFilter.h b/GPUImage/Source/GPUImageColorInvertFilter.h new file mode 100755 index 0000000..aaeec43 --- /dev/null +++ b/GPUImage/Source/GPUImageColorInvertFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageFilter.h" + +@interface GPUImageColorInvertFilter : GPUImageFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageColorInvertFilter.m b/GPUImage/Source/GPUImageColorInvertFilter.m new file mode 100755 index 0000000..0a8798b --- /dev/null +++ b/GPUImage/Source/GPUImageColorInvertFilter.m @@ -0,0 +1,46 @@ +#import "GPUImageColorInvertFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w); + } +); +#else +NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w); + } + ); +#endif + +@implementation GPUImageColorInvertFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageColorMatrixFilter.h b/GPUImage/Source/GPUImageColorMatrixFilter.h new file mode 100755 index 0000000..7588727 --- /dev/null +++ b/GPUImage/Source/GPUImageColorMatrixFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilter.h" + +/** Transforms the colors of an image by applying a matrix to them + */ +@interface GPUImageColorMatrixFilter : GPUImageFilter +{ + GLint colorMatrixUniform; + GLint intensityUniform; +} + +/** A 4x4 matrix used to transform each color in an image + */ +@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix; + +/** The degree to which the new transformed color replaces the original color for each pixel + */ +@property(readwrite, nonatomic) CGFloat intensity; + +@end diff --git a/GPUImage/Source/GPUImageColorMatrixFilter.m b/GPUImage/Source/GPUImageColorMatrixFilter.m new file mode 100755 index 0000000..0e21c7e --- /dev/null +++ b/GPUImage/Source/GPUImageColorMatrixFilter.m @@ -0,0 +1,87 @@ +#import "GPUImageColorMatrixFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const 
kGPUImageColorMatrixFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform lowp mat4 colorMatrix; + uniform lowp float intensity; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 outputColor = textureColor * colorMatrix; + + gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor); + } +); +#else +NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform mat4 colorMatrix; + uniform float intensity; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 outputColor = textureColor * colorMatrix; + + gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor); + } +); +#endif + +@implementation GPUImageColorMatrixFilter + +@synthesize intensity = _intensity; +@synthesize colorMatrix = _colorMatrix; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorMatrixFragmentShaderString])) + { + return nil; + } + + colorMatrixUniform = [filterProgram uniformIndex:@"colorMatrix"]; + intensityUniform = [filterProgram uniformIndex:@"intensity"]; + + self.intensity = 1.f; + self.colorMatrix = (GPUMatrix4x4){ + {1.f, 0.f, 0.f, 0.f}, + {0.f, 1.f, 0.f, 0.f}, + {0.f, 0.f, 1.f, 0.f}, + {0.f, 0.f, 0.f, 1.f} + }; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setIntensity:(CGFloat)newIntensity; +{ + _intensity = newIntensity; + + [self setFloat:_intensity forUniform:intensityUniform program:filterProgram]; +} + +- (void)setColorMatrix:(GPUMatrix4x4)newColorMatrix; +{ + _colorMatrix = newColorMatrix; + + [self setMatrix4f:_colorMatrix forUniform:colorMatrixUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageColorPackingFilter.h b/GPUImage/Source/GPUImageColorPackingFilter.h new file mode 100644 index 0000000..c2edca5 --- /dev/null +++ b/GPUImage/Source/GPUImageColorPackingFilter.h @@ -0,0 +1,10 @@ +#import "GPUImageFilter.h" + +@interface GPUImageColorPackingFilter : GPUImageFilter +{ + GLint texelWidthUniform, texelHeightUniform; + + CGFloat texelWidth, texelHeight; +} + +@end diff --git a/GPUImage/Source/GPUImageColorPackingFilter.m b/GPUImage/Source/GPUImageColorPackingFilter.m new file mode 100644 index 0000000..1a087ca --- /dev/null +++ b/GPUImage/Source/GPUImageColorPackingFilter.m @@ -0,0 +1,139 @@ +#import "GPUImageColorPackingFilter.h" + +NSString *const kGPUImageColorPackingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform float texelWidth; + uniform float texelHeight; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + void main() + { + gl_Position = position; + + upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight); + upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight); + lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight); + lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight); + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const 
kGPUImageColorPackingFragmentShaderString = SHADER_STRING +( + precision lowp float; + + uniform sampler2D inputImageTexture; + + uniform mediump mat3 convolutionMatrix; + + varying highp vec2 outputTextureCoordinate; + + varying highp vec2 upperLeftInputTextureCoordinate; + varying highp vec2 upperRightInputTextureCoordinate; + varying highp vec2 lowerLeftInputTextureCoordinate; + varying highp vec2 lowerRightInputTextureCoordinate; + + void main() + { + float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r; + float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r; + float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r; + float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r; + + gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity); + } +); +#else +NSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + uniform mat3 convolutionMatrix; + + varying vec2 outputTextureCoordinate; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + void main() + { + float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r; + float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r; + float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r; + float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r; + + gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity); + } +); +#endif + +@implementation GPUImageColorPackingFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageColorPackingVertexShaderString fragmentShaderFromString:kGPUImageColorPackingFragmentShaderString])) + { + return nil; + } + + texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"]; + + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + texelWidth = 0.5 / inputTextureSize.width; + texelHeight = 0.5 / inputTextureSize.height; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + glUniform1f(texelWidthUniform, texelWidth); + glUniform1f(texelHeightUniform, texelHeight); + }); +} + +#pragma mark - +#pragma mark Managing the display FBOs + +- (CGSize)sizeOfFBO; +{ + CGSize outputSize = [self maximumOutputSize]; + if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) ) + { + CGSize quarterSize; + quarterSize.width = inputTextureSize.width / 2.0; + quarterSize.height = inputTextureSize.height / 2.0; + return quarterSize; + } + else + { + return outputSize; + } +} + +#pragma mark - +#pragma mark Rendering + +- (CGSize)outputFrameSize; +{ + CGSize quarterSize; + quarterSize.width = inputTextureSize.width / 2.0; + quarterSize.height = inputTextureSize.height / 2.0; + return quarterSize; +} + +@end diff --git a/GPUImage/Source/GPUImageContrastFilter.h b/GPUImage/Source/GPUImageContrastFilter.h new file mode 100755 index 0000000..e09e6dc --- /dev/null +++ 
b/GPUImage/Source/GPUImageContrastFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilter.h" + +/** Adjusts the contrast of the image + */ +@interface GPUImageContrastFilter : GPUImageFilter +{ + GLint contrastUniform; +} + +/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level + */ +@property(readwrite, nonatomic) CGFloat contrast; + +@end diff --git a/GPUImage/Source/GPUImageContrastFilter.m b/GPUImage/Source/GPUImageContrastFilter.m new file mode 100755 index 0000000..5ed1e22 --- /dev/null +++ b/GPUImage/Source/GPUImageContrastFilter.m @@ -0,0 +1,66 @@ +#import "GPUImageContrastFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float contrast; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w); + } +); +#else +NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float contrast; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w); + } + ); +#endif + +@implementation GPUImageContrastFilter + +@synthesize contrast = _contrast; + +#pragma mark - +#pragma mark Initialization + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString])) + { + return nil; + } + + contrastUniform = [filterProgram uniformIndex:@"contrast"]; + self.contrast = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setContrast:(CGFloat)newValue; +{ + _contrast = newValue; + + [self setFloat:_contrast forUniform:contrastUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageCropFilter.h b/GPUImage/Source/GPUImageCropFilter.h new file mode 100755 index 0000000..641fb7b --- /dev/null +++ b/GPUImage/Source/GPUImageCropFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilter.h" + +@interface GPUImageCropFilter : GPUImageFilter +{ + GLfloat cropTextureCoordinates[8]; +} + +// The crop region is the rectangle within the image to crop. 
It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image +@property(readwrite, nonatomic) CGRect cropRegion; + +// Initialization and teardown +- (id)initWithCropRegion:(CGRect)newCropRegion; + +@end diff --git a/GPUImage/Source/GPUImageCropFilter.m b/GPUImage/Source/GPUImageCropFilter.m new file mode 100755 index 0000000..a783a1c --- /dev/null +++ b/GPUImage/Source/GPUImageCropFilter.m @@ -0,0 +1,277 @@ +#import "GPUImageCropFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } +); +#else +NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } +); +#endif + +@interface GPUImageCropFilter () + +- (void)calculateCropTextureCoordinates; + +@end + +@interface GPUImageCropFilter() +{ + CGSize originallySuppliedInputSize; +} + +@end + +@implementation GPUImageCropFilter + +@synthesize cropRegion = _cropRegion; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithCropRegion:(CGRect)newCropRegion; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageCropFragmentShaderString])) + { + return nil; + } + + self.cropRegion = newCropRegion; + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithCropRegion:CGRectMake(0.0, 0.0, 1.0, 1.0)])) + { + return nil; + } + + return self; +} + +#pragma mark - +#pragma mark Rendering + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (self.preventRendering) + { + return; + } + +// if (overrideInputSize) +// { +// if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero)) +// { +// return; +// } +// else +// { +// CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height)); +// inputTextureSize = insetRect.size; +// return; +// } +// } + + CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex]; + originallySuppliedInputSize = rotatedSize; + + CGSize scaledSize; + scaledSize.width = rotatedSize.width * _cropRegion.size.width; + scaledSize.height = rotatedSize.height * _cropRegion.size.height; + + + if (CGSizeEqualToSize(scaledSize, CGSizeZero)) + { + inputTextureSize = scaledSize; + } + else if (!CGSizeEqualToSize(inputTextureSize, scaledSize)) + { + inputTextureSize = scaledSize; + [self recreateFilterFBO]; + } +} + +#pragma mark - +#pragma mark GPUImageInput + +- (void)calculateCropTextureCoordinates; +{ + CGFloat minX = _cropRegion.origin.x; + CGFloat minY = _cropRegion.origin.y; + CGFloat maxX = CGRectGetMaxX(_cropRegion); + CGFloat maxY = CGRectGetMaxY(_cropRegion); + + switch(inputRotation) + { + case kGPUImageNoRotation: // Works + { + cropTextureCoordinates[0] = minX; // 0,0 + cropTextureCoordinates[1] = minY; + + cropTextureCoordinates[2] = maxX; // 1,0 + cropTextureCoordinates[3] = minY; + + cropTextureCoordinates[4] = minX; // 0,1 + cropTextureCoordinates[5] = maxY; + + cropTextureCoordinates[6] = maxX; // 1,1 + cropTextureCoordinates[7] = maxY; + }; break; + case kGPUImageRotateLeft: // Fixed + { + cropTextureCoordinates[0] = maxY; // 1,0 + cropTextureCoordinates[1] = 1.0 - maxX; + + cropTextureCoordinates[2] = maxY; // 
1,1 + cropTextureCoordinates[3] = 1.0 - minX; + + cropTextureCoordinates[4] = minY; // 0,0 + cropTextureCoordinates[5] = 1.0 - maxX; + + cropTextureCoordinates[6] = minY; // 0,1 + cropTextureCoordinates[7] = 1.0 - minX; + }; break; + case kGPUImageRotateRight: // Fixed + { + cropTextureCoordinates[0] = minY; // 0,1 + cropTextureCoordinates[1] = 1.0 - minX; + + cropTextureCoordinates[2] = minY; // 0,0 + cropTextureCoordinates[3] = 1.0 - maxX; + + cropTextureCoordinates[4] = maxY; // 1,1 + cropTextureCoordinates[5] = 1.0 - minX; + + cropTextureCoordinates[6] = maxY; // 1,0 + cropTextureCoordinates[7] = 1.0 - maxX; + }; break; + case kGPUImageFlipVertical: // Works for me + { + cropTextureCoordinates[0] = minX; // 0,1 + cropTextureCoordinates[1] = maxY; + + cropTextureCoordinates[2] = maxX; // 1,1 + cropTextureCoordinates[3] = maxY; + + cropTextureCoordinates[4] = minX; // 0,0 + cropTextureCoordinates[5] = minY; + + cropTextureCoordinates[6] = maxX; // 1,0 + cropTextureCoordinates[7] = minY; + }; break; + case kGPUImageFlipHorizonal: // Works for me + { + cropTextureCoordinates[0] = maxX; // 1,0 + cropTextureCoordinates[1] = minY; + + cropTextureCoordinates[2] = minX; // 0,0 + cropTextureCoordinates[3] = minY; + + cropTextureCoordinates[4] = maxX; // 1,1 + cropTextureCoordinates[5] = maxY; + + cropTextureCoordinates[6] = minX; // 0,1 + cropTextureCoordinates[7] = maxY; + }; break; + case kGPUImageRotate180: // Fixed + { + cropTextureCoordinates[0] = maxX; // 1,1 + cropTextureCoordinates[1] = maxY; + + cropTextureCoordinates[2] = minX; // 0,1 + cropTextureCoordinates[3] = maxY; + + cropTextureCoordinates[4] = maxX; // 1,0 + cropTextureCoordinates[5] = minY; + + cropTextureCoordinates[6] = minX; // 0,0 + cropTextureCoordinates[7] = minY; + }; break; + case kGPUImageRotateRightFlipVertical: // Fixed + { + cropTextureCoordinates[0] = minY; // 0,0 + cropTextureCoordinates[1] = 1.0 - maxX; + + cropTextureCoordinates[2] = minY; // 0,1 + cropTextureCoordinates[3] = 1.0 - minX; + + cropTextureCoordinates[4] = maxY; // 1,0 + cropTextureCoordinates[5] = 1.0 - maxX; + + cropTextureCoordinates[6] = maxY; // 1,1 + cropTextureCoordinates[7] = 1.0 - minX; + }; break; + case kGPUImageRotateRightFlipHorizontal: // Fixed + { + cropTextureCoordinates[0] = maxY; // 1,1 + cropTextureCoordinates[1] = 1.0 - minX; + + cropTextureCoordinates[2] = maxY; // 1,0 + cropTextureCoordinates[3] = 1.0 - maxX; + + cropTextureCoordinates[4] = minY; // 0,1 + cropTextureCoordinates[5] = 1.0 - minX; + + cropTextureCoordinates[6] = minY; // 0,0 + cropTextureCoordinates[7] = 1.0 - maxX; + }; break; + } +} + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + static const GLfloat cropSquareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + [self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates sourceTexture:filterSourceTexture]; + + [self informTargetsAboutNewFrameAtTime:frameTime]; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setCropRegion:(CGRect)newValue; +{ + NSParameterAssert(newValue.origin.x >= 0 && newValue.origin.x <= 1 && + newValue.origin.y >= 0 && newValue.origin.y <= 1 && + newValue.size.width >= 0 && newValue.size.width <= 1 && + newValue.size.height >= 0 && newValue.size.height <= 1); + + _cropRegion = newValue; + [self calculateCropTextureCoordinates]; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ 
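+ // Changing the rotation remaps the crop region onto different output vertices,
+ // so the cached crop texture coordinates are rebuilt once the new mode is stored.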
+ [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self calculateCropTextureCoordinates]; +} + +@end diff --git a/GPUImage/Source/GPUImageCrosshairGenerator.h b/GPUImage/Source/GPUImageCrosshairGenerator.h new file mode 100644 index 0000000..569774f --- /dev/null +++ b/GPUImage/Source/GPUImageCrosshairGenerator.h @@ -0,0 +1,17 @@ +#import "GPUImageFilter.h" + +@interface GPUImageCrosshairGenerator : GPUImageFilter +{ + GLint crosshairWidthUniform, crosshairColorUniform; +} + +// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5. +@property(readwrite, nonatomic) CGFloat crosshairWidth; + +// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0). +- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +// Rendering +- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime; + +@end diff --git a/GPUImage/Source/GPUImageCrosshairGenerator.m b/GPUImage/Source/GPUImageCrosshairGenerator.m new file mode 100644 index 0000000..f127dac --- /dev/null +++ b/GPUImage/Source/GPUImageCrosshairGenerator.m @@ -0,0 +1,138 @@ +#import "GPUImageCrosshairGenerator.h" + +NSString *const kGPUImageCrosshairVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + uniform float crosshairWidth; + + varying vec2 centerLocation; + varying float pointSpacing; + + void main() + { + gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0); + gl_PointSize = crosshairWidth + 1.0; + pointSpacing = 1.0 / crosshairWidth; + centerLocation = vec2(pointSpacing * ceil(crosshairWidth / 2.0), pointSpacing * ceil(crosshairWidth / 2.0)); + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING +( + uniform lowp vec3 crosshairColor; + + varying highp vec2 centerLocation; + varying highp float pointSpacing; + + void main() + { + lowp vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy); + lowp float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09); + + gl_FragColor = vec4(crosshairColor * axisTest, axisTest); +// gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0); + } +); +#else +NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING +( + GPUImageEscapedHashIdentifier(version 120)\n + + uniform vec3 crosshairColor; + + varying vec2 centerLocation; + varying float pointSpacing; + + void main() + { + vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy); + float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09); + + gl_FragColor = vec4(crosshairColor * axisTest, axisTest); + // gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0); + } +); +#endif + +@implementation GPUImageCrosshairGenerator + +@synthesize crosshairWidth = _crosshairWidth; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageCrosshairVertexShaderString fragmentShaderFromString:kGPUImageCrosshairFragmentShaderString])) + { + return nil; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + 
crosshairWidthUniform = [filterProgram uniformIndex:@"crosshairWidth"]; + crosshairColorUniform = [filterProgram uniformIndex:@"crosshairColor"]; + + self.crosshairWidth = 5.0; + [self setCrosshairColorRed:0.0 green:1.0 blue:0.0]; + }); + + return self; +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime; +{ + if (self.preventRendering) + { + return; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +#else + glEnable(GL_POINT_SPRITE); + glEnable(GL_VERTEX_PROGRAM_POINT_SIZE); +#endif + + [self setFilterFBO]; + + glClearColor(0.0, 0.0, 0.0, 0.0); + glClear(GL_COLOR_BUFFER_BIT); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, crosshairCoordinates); + + glDrawArrays(GL_POINTS, 0, (GLsizei)numberOfCrosshairs); + + [self informTargetsAboutNewFrameAtTime:frameTime]; + }); +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // Prevent rendering of the frame by normal means +} + +#pragma mark - +#pragma mark Accessors + +- (void)setCrosshairWidth:(CGFloat)newValue; +{ + _crosshairWidth = newValue; + + [self setFloat:_crosshairWidth forUniform:crosshairWidthUniform program:filterProgram]; +} + +- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 crosshairColor = {redComponent, greenComponent, blueComponent}; + + [self setVec3:crosshairColor forUniform:crosshairColorUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageCrosshatchFilter.h b/GPUImage/Source/GPUImageCrosshatchFilter.h new file mode 100755 index 0000000..dab1896 --- /dev/null +++ b/GPUImage/Source/GPUImageCrosshatchFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +@interface GPUImageCrosshatchFilter : GPUImageFilter +{ + GLint crossHatchSpacingUniform, lineWidthUniform; +} +// The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03. +@property(readwrite, nonatomic) CGFloat crossHatchSpacing; + +// A relative width for the crosshatch lines. The default is 0.003. 
+@property(readwrite, nonatomic) CGFloat lineWidth; + +@end diff --git a/GPUImage/Source/GPUImageCrosshatchFilter.m b/GPUImage/Source/GPUImageCrosshatchFilter.m new file mode 100755 index 0000000..51dbd59 --- /dev/null +++ b/GPUImage/Source/GPUImageCrosshatchFilter.m @@ -0,0 +1,163 @@ +#import "GPUImageCrosshatchFilter.h" + +// Shader code based on http://machinesdontcare.wordpress.com/ + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float crossHatchSpacing; + uniform highp float lineWidth; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + highp float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W); + + lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0); + if (luminance < 1.00) + { + if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.75) + { + if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.50) + { + if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.3) + { + if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + + gl_FragColor = colorToDisplay; + } +); +#else +NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float crossHatchSpacing; + uniform float lineWidth; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W); + + vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0); + if (luminance < 1.00) + { + if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.75) + { + if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.50) + { + if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + if (luminance < 0.3) + { + if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) + { + colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0); + } + } + + gl_FragColor = colorToDisplay; + } +); +#endif + +@implementation GPUImageCrosshatchFilter + +@synthesize crossHatchSpacing = _crossHatchSpacing; +@synthesize lineWidth = _lineWidth; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageCrosshatchFragmentShaderString])) + { + return nil; + } + + crossHatchSpacingUniform = [filterProgram uniformIndex:@"crossHatchSpacing"]; + lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"]; + + self.crossHatchSpacing = 0.03; + self.lineWidth = 0.003; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setCrossHatchSpacing:(CGFloat)newValue; +{ + 
CGFloat singlePixelSpacing; + if (inputTextureSize.width != 0.0) + { + singlePixelSpacing = 1.0 / inputTextureSize.width; + } + else + { + singlePixelSpacing = 1.0 / 2048.0; + } + + if (newValue < singlePixelSpacing) + { + _crossHatchSpacing = singlePixelSpacing; + } + else + { + _crossHatchSpacing = newValue; + } + + [self setFloat:_crossHatchSpacing forUniform:crossHatchSpacingUniform program:filterProgram]; +} + +- (void)setLineWidth:(CGFloat)newValue; +{ + _lineWidth = newValue; + + [self setFloat:_lineWidth forUniform:lineWidthUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageDarkenBlendFilter.h b/GPUImage/Source/GPUImageDarkenBlendFilter.h new file mode 100755 index 0000000..5dfe340 --- /dev/null +++ b/GPUImage/Source/GPUImageDarkenBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageDarkenBlendFilter.m b/GPUImage/Source/GPUImageDarkenBlendFilter.m new file mode 100644 index 0000000..85ec9e8 --- /dev/null +++ b/GPUImage/Source/GPUImageDarkenBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageDarkenBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 base = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0); + } +); +#else +NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0); + } + ); +#endif + +@implementation GPUImageDarkenBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDarkenBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageDifferenceBlendFilter.h b/GPUImage/Source/GPUImageDifferenceBlendFilter.h new file mode 100755 index 0000000..7c7dfc2 --- /dev/null +++ b/GPUImage/Source/GPUImageDifferenceBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageDifferenceBlendFilter.m b/GPUImage/Source/GPUImageDifferenceBlendFilter.m new file mode 100755 index 0000000..01bf09b --- /dev/null +++ b/GPUImage/Source/GPUImageDifferenceBlendFilter.m @@ -0,0 +1,50 @@ +#import "GPUImageDifferenceBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 textureColor = 
texture2D(inputImageTexture, textureCoordinate); + mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a); + } +); +#else +NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a); + } +); +#endif + +@implementation GPUImageDifferenceBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageDilationFilter.h b/GPUImage/Source/GPUImageDilationFilter.h new file mode 100644 index 0000000..59423a3 --- /dev/null +++ b/GPUImage/Source/GPUImageDilationFilter.h @@ -0,0 +1,16 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center. +// This extends out bright features, and is most commonly used with black-and-white thresholded images. + +extern NSString *const kGPUImageDilationRadiusOneVertexShaderString; +extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString; +extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString; +extern NSString *const kGPUImageDilationRadiusFourVertexShaderString; + +@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter + +// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4. 
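+// For example (illustrative):
+//   GPUImageDilationFilter *dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:2];
+// A radius of 0 is treated as 1, and values above 4 fall back to the radius-four
+// shaders, as the switch statement in the initializer below shows.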
+- (id)initWithRadius:(NSUInteger)dilationRadius; + +@end diff --git a/GPUImage/Source/GPUImageDilationFilter.m b/GPUImage/Source/GPUImageDilationFilter.m new file mode 100644 index 0000000..df06518 --- /dev/null +++ b/GPUImage/Source/GPUImageDilationFilter.m @@ -0,0 +1,431 @@ +#import "GPUImageDilationFilter.h" + +@implementation GPUImageDilationFilter + +NSString *const kGPUImageDilationRadiusOneVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec2 inputTextureCoordinate; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 offset = vec2(texelWidthOffset, texelHeightOffset); + + centerTextureCoordinate = inputTextureCoordinate; + oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset; + oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset; + } +); + +NSString *const kGPUImageDilationRadiusTwoVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec2 inputTextureCoordinate; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 offset = vec2(texelWidthOffset, texelHeightOffset); + + centerTextureCoordinate = inputTextureCoordinate; + oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset; + oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset; + twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0); + twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0); + } +); + +NSString *const kGPUImageDilationRadiusThreeVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec2 inputTextureCoordinate; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 offset = vec2(texelWidthOffset, texelHeightOffset); + + centerTextureCoordinate = inputTextureCoordinate; + oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset; + oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset; + twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0); + twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0); + threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0); + threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0); + } +); + +NSString *const kGPUImageDilationRadiusFourVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec2 inputTextureCoordinate; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 
twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 offset = vec2(texelWidthOffset, texelHeightOffset); + + centerTextureCoordinate = inputTextureCoordinate; + oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset; + oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset; + twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0); + twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0); + threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0); + threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0); + fourStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 4.0); + fourStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 4.0); + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + + lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + + lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; 
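+ // Each varying above carries one precomputed tap along the current pass's
+ // sampling axis; main() reduces the red channel across all taps with a running max.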
+ + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + + lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r; + float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r; + + lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + maxValue = max(maxValue, fourStepsPositiveIntensity); + maxValue = max(maxValue, fourStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); +#else +NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = 
texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + + float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + + float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + + float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); + +NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 
threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r; + float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r; + + float maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + maxValue = max(maxValue, fourStepsPositiveIntensity); + maxValue = max(maxValue, fourStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(maxValue), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithRadius:(NSUInteger)dilationRadius; +{ + NSString *fragmentShaderForThisRadius = nil; + NSString *vertexShaderForThisRadius = nil; + + switch (dilationRadius) + { + case 0: + case 1: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString; + fragmentShaderForThisRadius = kGPUImageDilationRadiusOneFragmentShaderString; + }; break; + case 2: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString; + fragmentShaderForThisRadius = kGPUImageDilationRadiusTwoFragmentShaderString; + }; break; + case 3: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString; + fragmentShaderForThisRadius = kGPUImageDilationRadiusThreeFragmentShaderString; + }; break; + case 4: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString; + }; break; + default: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString; + }; break; + } + + if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius])) + { + return nil; + } + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h b/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h new file mode 100644 index 0000000..fdffb9f --- /dev/null +++ 
b/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilter.h" + +@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter +{ + GLint texelWidthUniform, texelHeightUniform; + GLint upperThresholdUniform, lowerThresholdUniform; + + BOOL hasOverriddenImageSizeFactor; +} + +// The texel width and height determine how far out to sample from this texel. By default, these are the normalized width and height of a pixel, but they can be overridden for different effects. +@property(readwrite, nonatomic) CGFloat texelWidth; +@property(readwrite, nonatomic) CGFloat texelHeight; + +// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold). +@property(readwrite, nonatomic) CGFloat upperThreshold; +@property(readwrite, nonatomic) CGFloat lowerThreshold; + +@end diff --git a/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m b/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m new file mode 100644 index 0000000..b442f3a --- /dev/null +++ b/GPUImage/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m @@ -0,0 +1,141 @@ +#import "GPUImageDirectionalNonMaximumSuppressionFilter.h" + +@implementation GPUImageDirectionalNonMaximumSuppressionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING +( + precision mediump float; + + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform highp float texelWidth; + uniform highp float texelHeight; + uniform mediump float upperThreshold; + uniform mediump float lowerThreshold; + + void main() + { + vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb; + vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight); + + float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r; + float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r; + + float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r); + multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r); + + float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r); + multiplier = multiplier * thresholdCompliance; + + gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0); + } +); +#else +NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float texelWidth; + uniform float texelHeight; + uniform float upperThreshold; + uniform float lowerThreshold; + + void main() + { + vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb; + vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight); + + float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r; + float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r; + + float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r); + multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r); + + float thresholdCompliance = 
smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r); + multiplier = multiplier * thresholdCompliance; + + gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0); + } +); +#endif + +@synthesize texelWidth = _texelWidth; +@synthesize texelHeight = _texelHeight; +@synthesize upperThreshold = _upperThreshold; +@synthesize lowerThreshold = _lowerThreshold; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString])) + { + return nil; + } + + texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"]; + upperThresholdUniform = [filterProgram uniformIndex:@"upperThreshold"]; + lowerThresholdUniform = [filterProgram uniformIndex:@"lowerThreshold"]; + + self.upperThreshold = 0.5; + self.lowerThreshold = 0.1; + + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + if (!hasOverriddenImageSizeFactor) + { + _texelWidth = 1.0 / filterFrameSize.width; + _texelHeight = 1.0 / filterFrameSize.height; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + glUniform1f(texelWidthUniform, _texelWidth); + glUniform1f(texelHeightUniform, _texelHeight); + }); + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTexelWidth:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelWidth = newValue; + + [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram]; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelHeight = newValue; + + [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram]; +} + +- (void)setLowerThreshold:(CGFloat)newValue; +{ + _lowerThreshold = newValue; + + [self setFloat:_lowerThreshold forUniform:lowerThresholdUniform program:filterProgram]; +} + +- (void)setUpperThreshold:(CGFloat)newValue; +{ + _upperThreshold = newValue; + + [self setFloat:_upperThreshold forUniform:upperThresholdUniform program:filterProgram]; +} + + + +@end diff --git a/GPUImage/Source/GPUImageDissolveBlendFilter.h b/GPUImage/Source/GPUImageDissolveBlendFilter.h new file mode 100755 index 0000000..b4e5720 --- /dev/null +++ b/GPUImage/Source/GPUImageDissolveBlendFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter +{ + GLint mixUniform; +} + +// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level +@property(readwrite, nonatomic) CGFloat mix; + +@end diff --git a/GPUImage/Source/GPUImageDissolveBlendFilter.m b/GPUImage/Source/GPUImageDissolveBlendFilter.m new file mode 100755 index 0000000..b4a5609 --- /dev/null +++ b/GPUImage/Source/GPUImageDissolveBlendFilter.m @@ -0,0 +1,72 @@ +#import "GPUImageDissolveBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + uniform lowp float mixturePercent; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = mix(textureColor, textureColor2, mixturePercent); + } 
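+     // mix() performs a straight linear interpolation: a mixturePercent of 0.0
+     // yields only the first image and 1.0 only the second, matching the mix
+     // property documented in the header.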
+); +#else +NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + uniform float mixturePercent; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = mix(textureColor, textureColor2, mixturePercent); + } +); +#endif + +@implementation GPUImageDissolveBlendFilter + +@synthesize mix = _mix; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString])) + { + return nil; + } + + mixUniform = [filterProgram uniformIndex:@"mixturePercent"]; + self.mix = 0.5; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setMix:(CGFloat)newValue; +{ + _mix = newValue; + + [self setFloat:_mix forUniform:mixUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageDivideBlendFilter.h b/GPUImage/Source/GPUImageDivideBlendFilter.h new file mode 100644 index 0000000..ad798e2 --- /dev/null +++ b/GPUImage/Source/GPUImageDivideBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageDivideBlendFilter.m b/GPUImage/Source/GPUImageDivideBlendFilter.m new file mode 100644 index 0000000..63ee071 --- /dev/null +++ b/GPUImage/Source/GPUImageDivideBlendFilter.m @@ -0,0 +1,96 @@ +#import "GPUImageDivideBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 base = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + mediump float ra; + if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a))) + ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + else + ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + + + mediump float ga; + if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a))) + ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + else + ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + + + mediump float ba; + if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a))) + ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + else + ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + + mediump float a = overlay.a + base.a - overlay.a * base.a; + + gl_FragColor = vec4(ra, ga, ba, a); + } +); +#else +NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + float ra; + 
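+     // Per-channel divide with premultiplied alpha: when overlay.a is 0.0, or the
+     // base/overlay ratio exceeds the alpha ratio, the quotient would exceed 1.0,
+     // so the result clamps to the full-coverage compositing term; otherwise
+     // base * overlay.a^2 / overlay is used, plus the uncovered source and
+     // destination contributions.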
if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a))) + ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + else + ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + + + float ga; + if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a))) + ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + else + ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + + + float ba; + if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a))) + ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + else + ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + + float a = overlay.a + base.a - overlay.a * base.a; + + gl_FragColor = vec4(ra, ga, ba, a); + } + ); +#endif + +@implementation GPUImageDivideBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDivideBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageEmbossFilter.h b/GPUImage/Source/GPUImageEmbossFilter.h new file mode 100755 index 0000000..dbd21e8 --- /dev/null +++ b/GPUImage/Source/GPUImageEmbossFilter.h @@ -0,0 +1,8 @@ +#import "GPUImage3x3ConvolutionFilter.h" + +@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter + +// The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level +@property(readwrite, nonatomic) CGFloat intensity; + +@end diff --git a/GPUImage/Source/GPUImageEmbossFilter.m b/GPUImage/Source/GPUImageEmbossFilter.m new file mode 100755 index 0000000..6ba48cd --- /dev/null +++ b/GPUImage/Source/GPUImageEmbossFilter.m @@ -0,0 +1,49 @@ +#import "GPUImageEmbossFilter.h" + +@implementation GPUImageEmbossFilter + +@synthesize intensity = _intensity; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + self.intensity = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setIntensity:(CGFloat)newValue; +{ +// [(GPUImage3x3ConvolutionFilter *)filter setConvolutionMatrix:(GPUMatrix3x3){ +// {-2.0f, -1.0f, 0.0f}, +// {-1.0f, 1.0f, 1.0f}, +// { 0.0f, 1.0f, 2.0f} +// }]; + + _intensity = newValue; + + GPUMatrix3x3 newConvolutionMatrix; + newConvolutionMatrix.one.one = _intensity * (-2.0); + newConvolutionMatrix.one.two = -_intensity; + newConvolutionMatrix.one.three = 0.0f; + + newConvolutionMatrix.two.one = -_intensity; + newConvolutionMatrix.two.two = 1.0; + newConvolutionMatrix.two.three = _intensity; + + newConvolutionMatrix.three.one = 0.0f; + newConvolutionMatrix.three.two = _intensity; + newConvolutionMatrix.three.three = _intensity * 2.0; + + self.convolutionKernel = newConvolutionMatrix; +} + + +@end diff --git a/GPUImage/Source/GPUImageErosionFilter.h b/GPUImage/Source/GPUImageErosionFilter.h new file mode 100644 index 0000000..b311a26 --- /dev/null +++ b/GPUImage/Source/GPUImageErosionFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center. +// This extends out dark features, and is most commonly used with black-and-white thresholded images.
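+// A hedged usage sketch (the "thresholdedSource" name is hypothetical): chaining this
+// into a GPUImageDilationFilter of the same radius performs a morphological opening,
+// removing small bright specks while preserving larger features:
+//
+//   GPUImageErosionFilter *erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:2];
+//   GPUImageDilationFilter *dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:2];
+//   [thresholdedSource addTarget:erosionFilter];
+//   [erosionFilter addTarget:dilationFilter];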
+ +@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter + +// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4. +- (id)initWithRadius:(NSUInteger)erosionRadius; + +@end diff --git a/GPUImage/Source/GPUImageErosionFilter.m b/GPUImage/Source/GPUImageErosionFilter.m new file mode 100644 index 0000000..05f4f28 --- /dev/null +++ b/GPUImage/Source/GPUImageErosionFilter.m @@ -0,0 +1,312 @@ +#import "GPUImageErosionFilter.h" +#import "GPUImageDilationFilter.h" + +@implementation GPUImageErosionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + + lowp float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + + lowp float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = 
texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + + lowp float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r; + float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r; + + lowp float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + minValue = min(minValue, fourStepsPositiveIntensity); + minValue = min(minValue, fourStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); +#else +NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + + float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = 
SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + + float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + + float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); + +NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r; + float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r; + float oneStepNegativeIntensity = texture2D(inputImageTexture, 
oneStepNegativeTextureCoordinate).r; + float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r; + float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r; + float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r; + float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r; + float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r; + float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r; + + float minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + minValue = min(minValue, fourStepsPositiveIntensity); + minValue = min(minValue, fourStepsNegativeIntensity); + + gl_FragColor = vec4(vec3(minValue), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithRadius:(NSUInteger)erosionRadius; +{ + NSString *fragmentShaderForThisRadius = nil; + NSString *vertexShaderForThisRadius = nil; + + switch (erosionRadius) + { + case 0: + case 1: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString; + fragmentShaderForThisRadius = kGPUImageErosionRadiusOneFragmentShaderString; + }; break; + case 2: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString; + fragmentShaderForThisRadius = kGPUImageErosionRadiusTwoFragmentShaderString; + }; break; + case 3: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString; + fragmentShaderForThisRadius = kGPUImageErosionRadiusThreeFragmentShaderString; + }; break; + case 4: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString; + }; break; + default: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString; + }; break; + } + + if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius])) + { + return nil; + } + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageExclusionBlendFilter.h b/GPUImage/Source/GPUImageExclusionBlendFilter.h new file mode 100755 index 0000000..f7c83f5 --- /dev/null +++ b/GPUImage/Source/GPUImageExclusionBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageExclusionBlendFilter.m b/GPUImage/Source/GPUImageExclusionBlendFilter.m new file mode 100755 index 0000000..c364159 --- /dev/null +++ b/GPUImage/Source/GPUImageExclusionBlendFilter.m @@ -0,0 +1,56 @@ +#import "GPUImageExclusionBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING +( + 
varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 base = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + // Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa) + + gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a); + } +); +#else +NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + // Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa) + + gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a); + } + ); +#endif + +@implementation GPUImageExclusionBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageExclusionBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageExposureFilter.h b/GPUImage/Source/GPUImageExposureFilter.h new file mode 100755 index 0000000..886a052 --- /dev/null +++ b/GPUImage/Source/GPUImageExposureFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageFilter.h" + +@interface GPUImageExposureFilter : GPUImageFilter +{ + GLint exposureUniform; +} + +// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level +@property(readwrite, nonatomic) CGFloat exposure; + +@end diff --git a/GPUImage/Source/GPUImageExposureFilter.m b/GPUImage/Source/GPUImageExposureFilter.m new file mode 100755 index 0000000..d5ee2c9 --- /dev/null +++ b/GPUImage/Source/GPUImageExposureFilter.m @@ -0,0 +1,66 @@ +#import "GPUImageExposureFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform highp float exposure; + + void main() + { + highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w); + } +); +#else +NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float exposure; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w); + } +); +#endif + +@implementation GPUImageExposureFilter + +@synthesize exposure = _exposure; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString])) + { + return nil; + } + + exposureUniform = [filterProgram uniformIndex:@"exposure"]; + self.exposure = 0.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setExposure:(CGFloat)newValue; +{ + _exposure = newValue; + + [self setFloat:_exposure forUniform:exposureUniform program:filterProgram]; +} + +@end + diff 
--git a/GPUImage/Source/GPUImageFASTCornerDetectionFilter.h b/GPUImage/Source/GPUImageFASTCornerDetectionFilter.h new file mode 100644 index 0000000..0cf8b8a --- /dev/null +++ b/GPUImage/Source/GPUImageFASTCornerDetectionFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageFilterGroup.h" + +@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup +{ +// Generate a lookup texture based on the bit patterns + +// Step 1: convert to monochrome if necessary +// Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components +// Step 3: do non-maximum suppression of close corner points +} +@end diff --git a/GPUImage/Source/GPUImageFASTCornerDetectionFilter.m b/GPUImage/Source/GPUImageFASTCornerDetectionFilter.m new file mode 100644 index 0000000..814029f --- /dev/null +++ b/GPUImage/Source/GPUImageFASTCornerDetectionFilter.m @@ -0,0 +1,5 @@ +#import "GPUImageFASTCornerDetectionFilter.h" + +@implementation GPUImageFASTCornerDetectionFilter + +@end diff --git a/GPUImage/Source/GPUImageFalseColorFilter.h b/GPUImage/Source/GPUImageFalseColorFilter.h new file mode 100644 index 0000000..cb0b82f --- /dev/null +++ b/GPUImage/Source/GPUImageFalseColorFilter.h @@ -0,0 +1,15 @@ +#import "GPUImageFilter.h" + +@interface GPUImageFalseColorFilter : GPUImageFilter +{ + GLint firstColorUniform, secondColorUniform; +} + +// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0). +@property(readwrite, nonatomic) GPUVector4 firstColor; +@property(readwrite, nonatomic) GPUVector4 secondColor; + +- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +@end diff --git a/GPUImage/Source/GPUImageFalseColorFilter.m b/GPUImage/Source/GPUImageFalseColorFilter.m new file mode 100644 index 0000000..f514dba --- /dev/null +++ b/GPUImage/Source/GPUImageFalseColorFilter.m @@ -0,0 +1,101 @@ +#import "GPUImageFalseColorFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float intensity; + uniform vec3 firstColor; + uniform vec3 secondColor; + + const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, luminanceWeighting); + + gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a); + } +); +#else +NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float intensity; + uniform vec3 firstColor; + uniform vec3 secondColor; + + const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, luminanceWeighting); + + gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a); + } +); +#endif + + +@implementation GPUImageFalseColorFilter + +@synthesize secondColor = _secondColor; +@synthesize firstColor = _firstColor; + +- (id)init; +{ + if (!(self = [super 
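+    // Note: the shader above maps luminance 0.0 to firstColor and 1.0 to secondColor,
+    // so a hypothetical call such as [falseColorFilter setFirstColorRed:0.0 green:0.3 blue:0.0]
+    // would tint shadows green while highlights keep the default red.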
initWithFragmentShaderFromString:kGPUFalseColorFragmentShaderString])) + { + return nil; + } + + firstColorUniform = [filterProgram uniformIndex:@"firstColor"]; + secondColorUniform = [filterProgram uniformIndex:@"secondColor"]; + + self.firstColor = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f}; + self.secondColor = (GPUVector4){1.0f, 0.0f, 0.0f, 1.0f}; + + return self; +} + + +#pragma mark - +#pragma mark Accessors + +- (void)setFirstColor:(GPUVector4)newValue; +{ + _firstColor = newValue; + + [self setFirstColorRed:_firstColor.one green:_firstColor.two blue:_firstColor.three]; +} + +- (void)setSecondColor:(GPUVector4)newValue; +{ + _secondColor = newValue; + + [self setSecondColorRed:_secondColor.one green:_secondColor.two blue:_secondColor.three]; +} + +- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 firstColor = {redComponent, greenComponent, blueComponent}; + + [self setVec3:firstColor forUniform:firstColorUniform program:filterProgram]; +} + +- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 secondColor = {redComponent, greenComponent, blueComponent}; + + [self setVec3:secondColor forUniform:secondColorUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageFilter.h b/GPUImage/Source/GPUImageFilter.h new file mode 100755 index 0000000..0084a52 --- /dev/null +++ b/GPUImage/Source/GPUImageFilter.h @@ -0,0 +1,154 @@ +#import "GPUImageOutput.h" + +#define STRINGIZE(x) #x +#define STRINGIZE2(x) STRINGIZE(x) +#define SHADER_STRING(text) @ STRINGIZE2(text) + +#define GPUImageHashIdentifier # +#define GPUImageWrappedLabel(x) x +#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a + +extern NSString *const kGPUImageVertexShaderString; +extern NSString *const kGPUImagePassthroughFragmentShaderString; + +struct GPUVector4 { + GLfloat one; + GLfloat two; + GLfloat three; + GLfloat four; +}; +typedef struct GPUVector4 GPUVector4; + +struct GPUVector3 { + GLfloat one; + GLfloat two; + GLfloat three; +}; +typedef struct GPUVector3 GPUVector3; + +struct GPUMatrix4x4 { + GPUVector4 one; + GPUVector4 two; + GPUVector4 three; + GPUVector4 four; +}; +typedef struct GPUMatrix4x4 GPUMatrix4x4; + +struct GPUMatrix3x3 { + GPUVector3 one; + GPUVector3 two; + GPUVector3 three; +}; +typedef struct GPUMatrix3x3 GPUMatrix3x3; + +/** GPUImage's base filter class + + Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter. 
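+ 
+ As a hedged sketch (the still-image source and terminal target names are hypothetical), a minimal two-link chain looks like:
+ 
+ GPUImagePicture *stillSource = [[GPUImagePicture alloc] initWithImage:sourceImage];
+ GPUImageExposureFilter *exposureFilter = [[GPUImageExposureFilter alloc] init];
+ [stillSource addTarget:exposureFilter];
+ [exposureFilter addTarget:renderView];
+ [stillSource processImage];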
+ */ +@interface GPUImageFilter : GPUImageOutput +{ + GLuint filterSourceTexture; + + GLuint filterFramebuffer; + + GLProgram *filterProgram; + GLint filterPositionAttribute, filterTextureCoordinateAttribute; + GLint filterInputTextureUniform; + GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha; + + BOOL preparedToCaptureImage; + BOOL isEndProcessing; + + // Texture caches are an iOS-specific capability +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CVOpenGLESTextureCacheRef filterTextureCache; + CVPixelBufferRef renderTarget; + CVOpenGLESTextureRef renderTexture; +#else +#endif + + CGSize currentFilterSize; + GPUImageRotationMode inputRotation; + + BOOL currentlyReceivingMonochromeInput; + + NSMutableDictionary *uniformStateRestorationBlocks; +} + +@property(readonly) CVPixelBufferRef renderTarget; +@property(readwrite, nonatomic) BOOL preventRendering; +@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput; + +/// @name Initialization and teardown + +/** + Initialize with vertex and fragment shaders + + You may take advantage of the SHADER_STRING macro to write your shaders in-line. + @param vertexShaderString Source code of the vertex shader to use + @param fragmentShaderString Source code of the fragment shader to use + */ +- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString; + +/** + Initialize with a fragment shader + + You may take advantage of the SHADER_STRING macro to write your shader in-line. + @param fragmentShaderString Source code of fragment shader to use + */ +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +/** + Initialize with a fragment shader + @param fragmentShaderFilename Filename of fragment shader to load + */ +- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename; +- (void)initializeAttributes; +- (void)setupFilterForSize:(CGSize)filterFrameSize; +- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex; +- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation; + +- (void)recreateFilterFBO; + +/// @name Managing the display FBOs +/** Size of the frame buffer object + */ +- (CGSize)sizeOfFBO; +- (void)createFilterFBOofSize:(CGSize)currentFBOSize; + +/** Destroy the current filter frame buffer object + */ +- (void)destroyFilterFBO; +- (void)setFilterFBO; +- (void)setOutputFBO; +- (void)releaseInputTexturesIfNeeded; + +/// @name Rendering ++ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode; +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime; +- (CGSize)outputFrameSize; + +/// @name Input parameters +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName; +- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName; +- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName; +- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName; +- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName; +- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName; +- (void)setFloatArray:(GLfloat *)array 
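+// A hedged usage sketch for the by-name setters above (the filter instance and its
+// "intensity" uniform are hypothetical, belonging to a custom fragment shader):
+//   [customFilter setFloat:0.75 forUniformName:@"intensity"];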
length:(GLsizei)count forUniform:(NSString*)uniformName; + +- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; + +- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock; +- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex; + +@end diff --git a/GPUImage/Source/GPUImageFilter.m b/GPUImage/Source/GPUImageFilter.m new file mode 100755 index 0000000..7caf9a7 --- /dev/null +++ b/GPUImage/Source/GPUImageFilter.m @@ -0,0 +1,1047 @@ +#import "GPUImageFilter.h" +#import "GPUImagePicture.h" +#import <AVFoundation/AVFoundation.h> + +// Hardcode the vertex shader for standard filters, but this can be overridden +NSString *const kGPUImageVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + varying vec2 textureCoordinate; + + void main() + { + gl_Position = position; + textureCoordinate = inputTextureCoordinate.xy; + } + ); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + +NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } +); + +#else + +NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } +); +#endif + + +void dataProviderReleaseCallback (void *info, const void *data, size_t size); +void dataProviderUnlockCallback (void *info, const void *data, size_t size); + +@implementation GPUImageFilter + +@synthesize renderTarget; +@synthesize preventRendering = _preventRendering; +@synthesize currentlyReceivingMonochromeInput; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super init])) + { + return nil; + } + + uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10]; + preparedToCaptureImage = NO; + _preventRendering = NO; + currentlyReceivingMonochromeInput = NO; + inputRotation = kGPUImageNoRotation; + backgroundColorRed = 0.0; + backgroundColorGreen = 0.0; + backgroundColorBlue = 0.0; + backgroundColorAlpha = 0.0; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString 
fragmentShaderString:fragmentShaderString]; + + if (!filterProgram.initialized) + { + [self initializeAttributes]; + + if (![filterProgram link]) + { + NSString *progLog = [filterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [filterProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [filterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + filterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + filterPositionAttribute = [filterProgram attributeIndex:@"position"]; + filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"]; + filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader + + [GPUImageContext setActiveShaderProgram:filterProgram]; + + glEnableVertexAttribArray(filterPositionAttribute); + glEnableVertexAttribArray(filterTextureCoordinateAttribute); + }); + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [self initWithVertexShaderFromString:kGPUImageVertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename; +{ + NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@"fsh"]; + NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:nil]; + + if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (void)initializeAttributes; +{ + [filterProgram addAttribute:@"position"]; + [filterProgram addAttribute:@"inputTextureCoordinate"]; + + // Override this, calling back to this super method, in order to add new attributes to your vertex shader +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + // This is where you can override to provide some custom setup, if your filter has a size-dependent element +} + +- (void)dealloc +{ + [self destroyFilterFBO]; +} + +#pragma mark - +#pragma mark Still image processing + +void dataProviderReleaseCallback (void *info, const void *data, size_t size) +{ + free((void *)data); +} + +void dataProviderUnlockCallback (void *info, const void *data, size_t size) +{ + GPUImageFilter *filter = (__bridge_transfer GPUImageFilter*)info; + + CVPixelBufferUnlockBaseAddress([filter renderTarget], 0); + if ([filter renderTarget]) { + CFRelease([filter renderTarget]); + } + + [filter destroyFilterFBO]; + + filter.preventRendering = NO; +} + +- (CGImageRef)newCGImageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation +{ + + // a CGImage can only be created from a 'normal' color texture + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + + __block CGImageRef cgImageFromBytes; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + 
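+        // Readback sketch of the two paths below: with fast texture upload on a
+        // capture-prepared filter, pixels are read in place from the CVPixelBuffer
+        // backing the render target; otherwise they are copied out with glReadPixels.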
CGSize currentFBOSize = [self sizeOfFBO]; + NSUInteger totalBytesForImage = (int)currentFBOSize.width * (int)currentFBOSize.height * 4; + // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache + NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0; + NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)currentFBOSize.height * 4; + + GLubyte *rawImagePixels; + + CGDataProviderRef dataProvider; + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + // glFlush(); + glFinish(); + CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation + CVPixelBufferLockBaseAddress(renderTarget, 0); + self.preventRendering = YES; // Locks don't seem to work, so prevent any rendering to the filter which might overwrite the pixel buffer data until done processing + rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget); + dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback); + } + else + { + [self setOutputFBO]; + rawImagePixels = (GLubyte *)malloc(totalBytesForImage); + glReadPixels(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback); + } + + + CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB(); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + cgImageFromBytes = CGImageCreate((int)currentFBOSize.width, (int)currentFBOSize.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault); + } + else + { + cgImageFromBytes = CGImageCreate((int)currentFBOSize.width, (int)currentFBOSize.height, 8, 32, 4 * (int)currentFBOSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault); + } + + // Capture image with current device orientation + CGDataProviderRelease(dataProvider); + CGColorSpaceRelease(defaultRGBColorSpace); + }); + + return cgImageFromBytes; +} + +- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter +{ + return [self newCGImageByFilteringCGImage:imageToFilter orientation:UIImageOrientationUp]; +} + +- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter orientation:(UIImageOrientation)orientation; +{ + GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter]; + + [stillImageSource addTarget:self]; + [stillImageSource processImage]; + + CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutputWithOrientation:orientation]; + + [stillImageSource removeTarget:self]; + return processedImage; +} + +#pragma mark - +#pragma mark Managing the display FBOs + +- (CGSize)sizeOfFBO; +{ + CGSize outputSize = [self maximumOutputSize]; + if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) ) + { + return inputTextureSize; + } + else + { + return outputSize; + } +} + +- (void)createFilterFBOofSize:(CGSize)currentFBOSize; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; 
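+        // Two creation paths follow: an IOSurface-backed CVPixelBuffer bound through
+        // the texture cache (so captured frames can later be read without
+        // glReadPixels), or a plain OpenGL texture attached as the color attachment.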
+ glActiveTexture(GL_TEXTURE1); + + glGenFramebuffers(1, &filterFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + CFDictionaryRef empty; // empty value for attr value. + CFMutableDictionaryRef attrs; + empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary + attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); + CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty); + + err = CVPixelBufferCreate(kCFAllocatorDefault, (int)currentFBOSize.width, (int)currentFBOSize.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget); + if (err) + { + NSLog(@"FBO size: %f, %f", currentFBOSize.width, currentFBOSize.height); + NSAssert(NO, @"Error at CVPixelBufferCreate %d", err); + } + + err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + filterTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + self.outputTextureOptions.internalFormat, // opengl format + (int)currentFBOSize.width, + (int)currentFBOSize.height, + self.outputTextureOptions.format, // native iOS format + self.outputTextureOptions.type, + 0, + &renderTexture); + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + CFRelease(attrs); + CFRelease(empty); + + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); + outputTexture = CVOpenGLESTextureGetName(renderTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); + + [self notifyTargetsAboutNewOutputTexture]; +#endif + } + else + { + [self initializeOutputTextureIfNeeded]; + + glBindTexture(GL_TEXTURE_2D, outputTexture); + +// if ([self providesMonochromeOutput] && [GPUImageContext deviceSupportsRedTextures]) +// { +// glTexImage2D(GL_TEXTURE_2D, 0, GL_RG_EXT, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RG_EXT, GL_UNSIGNED_BYTE, 0); +// } +// else +// { + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + (int)currentFBOSize.width, + (int)currentFBOSize.height, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, + 0); +// } + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0); +// glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer); +// GLenum att = GL_COLOR_ATTACHMENT0; +// glDrawBuffers(1, &att); + [self notifyTargetsAboutNewOutputTexture]; + } + + // 
NSLog(@"Filter size: %f, %f for filter: %@", currentFBOSize.width, currentFBOSize.height, self); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + glBindTexture(GL_TEXTURE_2D, 0); + }); +} + +- (void)destroyFilterFBO; +{ + if (filterFramebuffer) + { + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + glDeleteFramebuffers(1, &filterFramebuffer); + filterFramebuffer = 0; + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + if (filterTextureCache != NULL) + { + CFRelease(renderTarget); + renderTarget = NULL; + + if (renderTexture) + { + CFRelease(renderTexture); + renderTexture = NULL; + } + + CVOpenGLESTextureCacheFlush(filterTextureCache, 0); + CFRelease(filterTextureCache); + filterTextureCache = NULL; + } +#endif + }); + } +} + +- (void)setFilterFBO; +{ + if (!filterFramebuffer) + { + CGSize currentFBOSize = [self sizeOfFBO]; + [self createFilterFBOofSize:currentFBOSize]; + [self setupFilterForSize:currentFBOSize]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer); + + CGSize currentFBOSize = [self sizeOfFBO]; + glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height); +} + +- (void)setOutputFBO; +{ + // Override this for filters that have multiple framebuffers + [self setFilterFBO]; +} + +- (void)releaseInputTexturesIfNeeded; +{ + if (shouldConserveMemoryForNextFrame) + { + [firstTextureDelegate textureNoLongerNeededForTarget:self]; + shouldConserveMemoryForNextFrame = NO; + } +} + +#pragma mark - +#pragma mark Rendering + ++ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode; +{ + static const GLfloat noRotationTextureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat rotateLeftTextureCoordinates[] = { + 1.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 0.0f, 1.0f, + }; + + static const GLfloat rotateRightTextureCoordinates[] = { + 0.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 1.0f, 0.0f, + }; + + static const GLfloat verticalFlipTextureCoordinates[] = { + 0.0f, 1.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 0.0f, + }; + + static const GLfloat horizontalFlipTextureCoordinates[] = { + 1.0f, 0.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 1.0f, + }; + + static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = { + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f, + }; + + static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = { + 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotate180TextureCoordinates[] = { + 1.0f, 1.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 0.0f, + }; + + switch(rotationMode) + { + case kGPUImageNoRotation: return noRotationTextureCoordinates; + case kGPUImageRotateLeft: return rotateLeftTextureCoordinates; + case kGPUImageRotateRight: return rotateRightTextureCoordinates; + case kGPUImageFlipVertical: return verticalFlipTextureCoordinates; + case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates; + case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates; + case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates; + case kGPUImageRotate180: return rotate180TextureCoordinates; + } +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + 
[GPUImageContext setActiveShaderProgram:filterProgram]; + [self setFilterFBO]; + [self setUniformsForProgramAtIndex:0]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + + glUniform1i(filterInputTextureUniform, 2); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime; +{ + if (self.frameProcessingCompletionBlock != NULL) + { + self.frameProcessingCompletionBlock(self, frameTime); + } + + [self releaseInputTexturesIfNeeded]; + + for (id currentTarget in targets) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + [self setInputTextureForTarget:currentTarget atIndex:textureIndex]; + } + + [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex]; + [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex]; + } + } +} + +- (CGSize)outputFrameSize; +{ + return inputTextureSize; +} + +- (void)prepareForImageCapture; +{ + if (preparedToCaptureImage) + { + return; + } + + preparedToCaptureImage = YES; + + if ([GPUImageContext supportsFastTextureUpload]) + { + if (outputTexture) + { + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + }); + } + } +} + +#pragma mark - +#pragma mark Input parameters + +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +{ + backgroundColorRed = redComponent; + backgroundColorGreen = greenComponent; + backgroundColorBlue = blueComponent; + backgroundColorAlpha = alphaComponent; +} + +- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setInteger:newInteger forUniform:uniformIndex program:filterProgram]; +} + +- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setFloat:newFloat forUniform:uniformIndex program:filterProgram]; +} + +- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setSize:newSize forUniform:uniformIndex program:filterProgram]; +} + +- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setPoint:newPoint forUniform:uniformIndex program:filterProgram]; +} + +- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setVec3:newVec3 forUniform:uniformIndex program:filterProgram]; +} + +- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName; +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + [self setVec4:newVec4 forUniform:uniformIndex program:filterProgram]; +} + +- 
(void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName +{ + GLint uniformIndex = [filterProgram uniformIndex:uniformName]; + + [self setFloatArray:array length:count forUniform:uniformIndex program:filterProgram]; +} + +- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniformMatrix3fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix); + }]; + }); +} + +- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniformMatrix4fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix); + }]; + }); +} + +- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniform1f(uniform, floatValue); + }]; + }); +} + +- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + GLfloat positionArray[2]; + positionArray[0] = pointValue.x; + positionArray[1] = pointValue.y; + + glUniform2fv(uniform, 1, positionArray); + }]; + }); +} + +- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + GLfloat sizeArray[2]; + sizeArray[0] = sizeValue.width; + sizeArray[1] = sizeValue.height; + + glUniform2fv(uniform, 1, sizeArray); + }]; + }); +} + +- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniform3fv(uniform, 1, (GLfloat *)&vectorValue); + }]; + }); +} + +- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniform4fv(uniform, 1, (GLfloat *)&vectorValue); + }]; + }); +} + +- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniform1fv(uniform, arrayLength, arrayValue); + }]; + }); +} + +- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram; +{ + 
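// As with the other uniform setters above, the glUniform call is dispatched to the video processing queue with the target program made current first, and it is registered as a restoration block so that -setUniformsForProgramAtIndex: can replay the latest value before each draw. +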
runAsynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:shaderProgram]; + + [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{ + glUniform1i(uniform, intValue); + }]; + }); +} + +- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock; +{ + [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]]; + uniformStateBlock(); +} + +- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex; +{ + [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){ + dispatch_block_t currentBlock = obj; + currentBlock(); + }]; +} + +#pragma mark - +#pragma mark GPUImageInput + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + static const GLfloat imageVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + + [self informTargetsAboutNewFrameAtTime:frameTime]; +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + filterSourceTexture = newInputTexture; +} + +- (void)recreateFilterFBO +{ + cachedMaximumOutputSize = CGSizeZero; + if (!filterFramebuffer) + { + return; + } + + [self destroyFilterFBO]; + [self deleteOutputTexture]; + + [self setFilterFBO]; +} + +- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex; +{ + CGSize rotatedSize = sizeToRotate; + + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + rotatedSize.width = sizeToRotate.height; + rotatedSize.height = sizeToRotate.width; + } + + return rotatedSize; +} + +- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation; +{ + CGPoint rotatedPoint; + switch(rotation) + { + case kGPUImageNoRotation: return pointToRotate; break; + case kGPUImageFlipHorizonal: + { + rotatedPoint.x = 1.0 - pointToRotate.x; + rotatedPoint.y = pointToRotate.y; + }; break; + case kGPUImageFlipVertical: + { + rotatedPoint.x = pointToRotate.x; + rotatedPoint.y = 1.0 - pointToRotate.y; + }; break; + case kGPUImageRotateLeft: + { + rotatedPoint.x = 1.0 - pointToRotate.y; + rotatedPoint.y = pointToRotate.x; + }; break; + case kGPUImageRotateRight: + { + rotatedPoint.x = pointToRotate.y; + rotatedPoint.y = 1.0 - pointToRotate.x; + }; break; + case kGPUImageRotateRightFlipVertical: + { + rotatedPoint.x = pointToRotate.y; + rotatedPoint.y = pointToRotate.x; + }; break; + case kGPUImageRotateRightFlipHorizontal: + { + rotatedPoint.x = 1.0 - pointToRotate.y; + rotatedPoint.y = 1.0 - pointToRotate.x; + }; break; + case kGPUImageRotate180: + { + rotatedPoint.x = 1.0 - pointToRotate.x; + rotatedPoint.y = 1.0 - pointToRotate.y; + }; break; + } + + return rotatedPoint; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (self.preventRendering) + { + return; + } + + if (overrideInputSize) + { + if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero)) + { + return; + } + else + { + CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height)); + inputTextureSize = insetRect.size; + return; + } + } + + CGSize 
rotatedSize = [self rotatedSize:newSize forIndex:textureIndex]; + + if (CGSizeEqualToSize(rotatedSize, CGSizeZero)) + { + inputTextureSize = rotatedSize; + } + else if (!CGSizeEqualToSize(inputTextureSize, rotatedSize)) + { + inputTextureSize = rotatedSize; + [self recreateFilterFBO]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + if (CGSizeEqualToSize(frameSize, CGSizeZero)) + { + overrideInputSize = NO; + } + else + { + overrideInputSize = YES; + inputTextureSize = frameSize; + forcedMaximumSize = CGSizeZero; + } + + [self destroyFilterFBO]; + + for (id currentTarget in targets) + { + if ([currentTarget respondsToSelector:@selector(destroyFilterFBO)]) { + [currentTarget performSelector:@selector(destroyFilterFBO)]; + } + } +} + +- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize; +{ + if (CGSizeEqualToSize(frameSize, CGSizeZero)) + { + overrideInputSize = NO; + inputTextureSize = CGSizeZero; + forcedMaximumSize = CGSizeZero; + } + else + { + overrideInputSize = YES; + forcedMaximumSize = frameSize; + } + + [self destroyFilterFBO]; + + for (id currentTarget in targets) + { + if ([currentTarget respondsToSelector:@selector(destroyFilterFBO)]) { + [currentTarget performSelector:@selector(destroyFilterFBO)]; + } + } +} + +- (void)cleanupOutputImage; +{ +// NSLog(@"Cleaning up output filter image: %@", self); + [self destroyFilterFBO]; + [self deleteOutputTexture]; +} + +- (void)deleteOutputTexture; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (!([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)) + { + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + } + }); +} + +- (CGSize)maximumOutputSize; +{ + // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better + return CGSizeZero; + + /* + if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero)) + { + for (id currentTarget in targets) + { + if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width) + { + cachedMaximumOutputSize = [currentTarget maximumOutputSize]; + } + } + } + + return cachedMaximumOutputSize; + */ +} + +- (void)endProcessing +{ + if (!isEndProcessing) + { + isEndProcessing = YES; + + for (id currentTarget in targets) + { + [currentTarget endProcessing]; + } + } +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + firstTextureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + if (overrideInputSize) + { + return; + } + + shouldConserveMemoryForNextFrame = YES; + + for (id currentTarget in targets) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + [currentTarget conserveMemoryForNextFrame]; + } + } +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +#pragma mark - +#pragma mark Accessors + +@end diff --git a/GPUImage/Source/GPUImageFilterGroup.h b/GPUImage/Source/GPUImageFilterGroup.h new file mode 100755 index 0000000..e188228 --- /dev/null +++ b/GPUImage/Source/GPUImageFilterGroup.h @@ -0,0 +1,19 @@ +#import "GPUImageOutput.h" +#import "GPUImageFilter.h" + +@interface GPUImageFilterGroup : GPUImageOutput +{ + NSMutableArray *filters; + BOOL isEndProcessing; +} + +@property(readwrite, nonatomic, strong) GPUImageOutput *terminalFilter; +@property(readwrite, nonatomic, strong) 
NSArray *initialFilters; +@property(readwrite, nonatomic, strong) GPUImageOutput *inputFilterToIgnoreForUpdates; + +// Filter management +- (void)addFilter:(GPUImageOutput *)newFilter; +- (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex; +- (NSUInteger)filterCount; + +@end diff --git a/GPUImage/Source/GPUImageFilterGroup.m b/GPUImage/Source/GPUImageFilterGroup.m new file mode 100755 index 0000000..9f46805 --- /dev/null +++ b/GPUImage/Source/GPUImageFilterGroup.m @@ -0,0 +1,235 @@ +#import "GPUImageFilterGroup.h" +#import "GPUImagePicture.h" + +@implementation GPUImageFilterGroup + +@synthesize terminalFilter = _terminalFilter; +@synthesize initialFilters = _initialFilters; +@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + filters = [[NSMutableArray alloc] init]; + + return self; +} + +#pragma mark - +#pragma mark Filter management + +- (void)addFilter:(GPUImageOutput *)newFilter; +{ + [filters addObject:newFilter]; +} + +- (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex; +{ + return [filters objectAtIndex:filterIndex]; +} + +- (NSUInteger)filterCount; +{ + return [filters count]; +} + +#pragma mark - +#pragma mark Still image processing + +- (CGImageRef)newCGImageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation; +{ + return [self.terminalFilter newCGImageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; +} + +- (void)prepareForImageCapture; +{ + [self.terminalFilter prepareForImageCapture]; +} + +#pragma mark - +#pragma mark GPUImageOutput overrides + +- (void)setTargetToIgnoreForUpdates:(id)targetToIgnoreForUpdates; +{ + [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates]; +} + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation]; +} + +- (void)removeTarget:(id)targetToRemove; +{ + [_terminalFilter removeTarget:targetToRemove]; +} + +- (void)removeAllTargets; +{ + [_terminalFilter removeAllTargets]; +} + +- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock; +{ + [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock]; +} + +- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock; +{ + return [_terminalFilter frameProcessingCompletionBlock]; +} + +#pragma mark - +#pragma mark GPUImageInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [_initialFilters count]; + + for (GPUImageOutput *currentFilter in _initialFilters) + { + if (currentFilter != self.inputFilterToIgnoreForUpdates) + { + [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex]; + } + } +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + firstTextureDelegate = newTextureDelegate; + + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter setTextureDelegate:self atIndex:textureIndex]; + } +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter setInputTexture:newInputTexture atIndex:textureIndex]; + } +} + +- (NSInteger)nextAvailableTextureIndex; +{ +// if ([_initialFilters count] > 0) +// { +// return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex]; +// } + + return 0; +} + +- 
(void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter setInputSize:newSize atIndex:textureIndex]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter setInputRotation:newInputRotation atIndex:(NSInteger)textureIndex]; + } +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + for (GPUImageOutput *currentFilter in filters) + { + [currentFilter forceProcessingAtSize:frameSize]; + } +} + +- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize; +{ + for (GPUImageOutput *currentFilter in filters) + { + [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize]; + } +} + +- (CGSize)maximumOutputSize; +{ + // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better + return CGSizeZero; + + /* + if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero)) + { + for (id currentTarget in _initialFilters) + { + if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width) + { + cachedMaximumOutputSize = [currentTarget maximumOutputSize]; + } + } + } + + return cachedMaximumOutputSize; + */ +} + +- (void)endProcessing; +{ + if (!isEndProcessing) + { + isEndProcessing = YES; + + for (id currentTarget in targets) + { + [currentTarget endProcessing]; + } + } +} + +- (void)conserveMemoryForNextFrame; +{ + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter conserveMemoryForNextFrame]; + } +} + +- (BOOL)wantsMonochromeInput; +{ + BOOL allInputsWantMonochromeInput = YES; + for (GPUImageOutput *currentFilter in _initialFilters) + { + allInputsWantMonochromeInput = allInputsWantMonochromeInput && [currentFilter wantsMonochromeInput]; + } + + return allInputsWantMonochromeInput; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + for (GPUImageOutput *currentFilter in _initialFilters) + { + [currentFilter setCurrentlyReceivingMonochromeInput:newValue]; + } +} + +#pragma mark - +#pragma mark GPUImageTextureDelegate methods + +- (void)textureNoLongerNeededForTarget:(id)textureTarget; +{ + outputTextureRetainCount--; + if (outputTextureRetainCount < 1) + { + [firstTextureDelegate textureNoLongerNeededForTarget:self]; + } +} + +@end diff --git a/GPUImage/Source/GPUImageFilterPipeline.h b/GPUImage/Source/GPUImageFilterPipeline.h new file mode 100755 index 0000000..90b565f --- /dev/null +++ b/GPUImage/Source/GPUImageFilterPipeline.h @@ -0,0 +1,30 @@ +#import <Foundation/Foundation.h> +#import "GPUImageFilter.h" + +@interface GPUImageFilterPipeline : NSObject +{ + NSString *stringValue; +} + +@property (strong) NSMutableArray *filters; + +@property (strong) GPUImageOutput *input; +@property (strong) id <GPUImageInput> output; + +- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output; +- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output; +- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output; + +- (void) addFilter:(GPUImageFilter*)filter; +- (void) addFilter:(GPUImageFilter*)filter atIndex:(NSUInteger)insertIndex; +- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter*)filter; +- (void) replaceAllFilters:(NSArray*) newFilters; +- (void) removeFilterAtIndex:(NSUInteger)index; +- (void) removeAllFilters; + +- (UIImage *) 
currentFilteredFrame; +- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation; +- (CGImageRef) newCGImageFromCurrentFilteredFrame; +- (CGImageRef) newCGImageFromCurrentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation; + +@end diff --git a/GPUImage/Source/GPUImageFilterPipeline.m b/GPUImage/Source/GPUImageFilterPipeline.m new file mode 100755 index 0000000..f719c17 --- /dev/null +++ b/GPUImage/Source/GPUImageFilterPipeline.m @@ -0,0 +1,217 @@ +#import "GPUImageFilterPipeline.h" + +@interface GPUImageFilterPipeline () + +- (BOOL)_parseConfiguration:(NSDictionary *)configuration; + +- (void)_refreshFilters; + +@end + +@implementation GPUImageFilterPipeline + +@synthesize filters = _filters, input = _input, output = _output; + +#pragma mark Config file init + +- (id)initWithConfiguration:(NSDictionary *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output { + self = [super init]; + if (self) { + self.input = input; + self.output = output; + if (![self _parseConfiguration:configuration]) { + NSLog(@"Sorry, a parsing error occurred."); + abort(); + } + [self _refreshFilters]; + } + return self; +} + +- (id)initWithConfigurationFile:(NSURL *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output { + return [self initWithConfiguration:[NSDictionary dictionaryWithContentsOfURL:configuration] input:input output:output]; +} + +- (BOOL)_parseConfiguration:(NSDictionary *)configuration { + NSArray *filters = [configuration objectForKey:@"Filters"]; + if (!filters) { + return NO; + } + + NSError *regexError = nil; + NSRegularExpression *parsingRegex = [NSRegularExpression regularExpressionWithPattern:@"(float|CGPoint|NSString)\\((.*?)(?:,\\s*(.*?))*\\)" + options:0 + error:&regexError]; + + // It's faster to put them into an array and then pass it to the filters property than it is to call [self addFilter:] every time + NSMutableArray *orderedFilters = [NSMutableArray arrayWithCapacity:[filters count]]; + for (NSDictionary *filter in filters) { + NSString *filterName = [filter objectForKey:@"FilterName"]; + Class theClass = NSClassFromString(filterName); + GPUImageFilter *genericFilter = [[theClass alloc] init]; + // Set up the properties + NSDictionary *filterAttributes; + if ((filterAttributes = [filter objectForKey:@"Attributes"])) { + for (NSString *propertyKey in filterAttributes) { + // Set up the selector + SEL theSelector = NSSelectorFromString(propertyKey); + NSInvocation *inv = [NSInvocation invocationWithMethodSignature:[theClass instanceMethodSignatureForSelector:theSelector]]; + [inv setSelector:theSelector]; + [inv setTarget:genericFilter]; + + // check selector given with parameter + if ([propertyKey hasSuffix:@":"]) { + + stringValue = nil; + + // Then parse the arguments + NSMutableArray *parsedArray; + if ([[filterAttributes objectForKey:propertyKey] isKindOfClass:[NSArray class]]) { + NSArray *array = [filterAttributes objectForKey:propertyKey]; + parsedArray = [NSMutableArray arrayWithCapacity:[array count]]; + for (NSString *string in array) { + NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string + options:0 + range:NSMakeRange(0, [string length])]; + + NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]]; + if ([modifier isEqualToString:@"float"]) { + // Float modifier, one argument + CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue]; + [parsedArray addObject:[NSNumber numberWithFloat:value]]; + [inv setArgument:&value atIndex:2]; + } else if ([modifier 
isEqualToString:@"CGPoint"]) { + // CGPoint modifier, two float arguments + CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue]; + CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue]; + CGPoint value = CGPointMake(x, y); + [parsedArray addObject:[NSValue valueWithCGPoint:value]]; + } else if ([modifier isEqualToString:@"NSString"]) { + // NSString modifier, one string argument + stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy]; + [inv setArgument:&stringValue atIndex:2]; + + } else { + return NO; + } + } + [inv setArgument:&parsedArray atIndex:2]; + } else { + NSString *string = [filterAttributes objectForKey:propertyKey]; + NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string + options:0 + range:NSMakeRange(0, [string length])]; + + NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]]; + if ([modifier isEqualToString:@"float"]) { + // Float modifier, one argument + CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue]; + [inv setArgument:&value atIndex:2]; + } else if ([modifier isEqualToString:@"CGPoint"]) { + // CGPoint modifier, two float arguments + CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue]; + CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue]; + CGPoint value = CGPointMake(x, y); + [inv setArgument:&value atIndex:2]; + } else if ([modifier isEqualToString:@"NSString"]) { + // NSString modifier, one string argument + stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy]; + [inv setArgument:&stringValue atIndex:2]; + + } else { + return NO; + } + } + } + + + [inv invoke]; + } + } + [orderedFilters addObject:genericFilter]; + } + self.filters = orderedFilters; + + return YES; +} + +#pragma mark Regular init + +- (id)initWithOrderedFilters:(NSArray *)filters input:(GPUImageOutput *)input output:(id )output { + self = [super init]; + if (self) { + self.input = input; + self.output = output; + self.filters = [NSMutableArray arrayWithArray:filters]; + [self _refreshFilters]; + } + return self; +} + +- (void)addFilter:(GPUImageFilter *)filter atIndex:(NSUInteger)insertIndex { + [self.filters insertObject:filter atIndex:insertIndex]; + [self _refreshFilters]; +} + +- (void)addFilter:(GPUImageFilter *)filter { + [self.filters addObject:filter]; + [self _refreshFilters]; +} + +- (void)replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageFilter *)filter { + [self.filters replaceObjectAtIndex:index withObject:filter]; + [self _refreshFilters]; +} + +- (void)removeFilterAtIndex:(NSUInteger)index { + [self.filters removeObjectAtIndex:index]; + [self _refreshFilters]; +} + +- (void)removeAllFilters { + [self.filters removeAllObjects]; + [self _refreshFilters]; +} + +- (void)replaceAllFilters:(NSArray *)newFilters { + self.filters = [NSMutableArray arrayWithArray:newFilters]; + [self _refreshFilters]; +} + +- (void)_refreshFilters { + + id prevFilter = self.input; + GPUImageFilter *theFilter = nil; + + for (int i = 0; i < [self.filters count]; i++) { + theFilter = [self.filters objectAtIndex:i]; + [prevFilter removeAllTargets]; + [prevFilter addTarget:theFilter]; + prevFilter = theFilter; + } + + [prevFilter removeAllTargets]; + + if (self.output != nil) { + [prevFilter addTarget:self.output]; + } +} + +- (UIImage *)currentFilteredFrame { + return [(GPUImageFilter *)[_filters lastObject] imageFromCurrentlyProcessedOutput]; +} + +- (UIImage 
*)currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation { + return [(GPUImageFilter *)[_filters lastObject] imageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; +} + +- (CGImageRef)newCGImageFromCurrentFilteredFrame { + return [(GPUImageFilter *)[_filters lastObject] newCGImageFromCurrentlyProcessedOutput]; +} + +- (CGImageRef)newCGImageFromCurrentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation { + return [(GPUImageFilter *)[_filters lastObject] newCGImageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; +} + + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageGammaFilter.h b/GPUImage/Source/GPUImageGammaFilter.h new file mode 100755 index 0000000..0521d08 --- /dev/null +++ b/GPUImage/Source/GPUImageGammaFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageFilter.h" + +@interface GPUImageGammaFilter : GPUImageFilter +{ + GLint gammaUniform; +} + +// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level +@property(readwrite, nonatomic) CGFloat gamma; + +@end diff --git a/GPUImage/Source/GPUImageGammaFilter.m b/GPUImage/Source/GPUImageGammaFilter.m new file mode 100755 index 0000000..35adaba --- /dev/null +++ b/GPUImage/Source/GPUImageGammaFilter.m @@ -0,0 +1,66 @@ +#import "GPUImageGammaFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float gamma; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w); + } +); +#else +NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float gamma; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w); + } +); +#endif + +@implementation GPUImageGammaFilter + +@synthesize gamma = _gamma; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString])) + { + return nil; + } + + gammaUniform = [filterProgram uniformIndex:@"gamma"]; + self.gamma = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setGamma:(CGFloat)newValue; +{ + _gamma = newValue; + + [self setFloat:_gamma forUniform:gammaUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageGaussianBlurFilter.h b/GPUImage/Source/GPUImageGaussianBlurFilter.h new file mode 100755 index 0000000..dc2bb78 --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianBlurFilter.h @@ -0,0 +1,36 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +/** A Gaussian blur filter + Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/ + */ + +@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter +{ + BOOL shouldResizeBlurRadiusWithImageSize; + CGFloat _blurRadiusInPixels; +} + +/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result. 
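+ For example, a value of 2.0 samples texels twice as far apart for the same Gaussian weights, widening the effective blur at the cost of undersampling artifacts.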
+ */ +@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier; + +/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function. + */ +@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; + +/** Setting these properties will allow the blur radius to scale with the size of the image + */ +@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth; +@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight; + +/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter. +@property(readwrite, nonatomic) NSUInteger blurPasses; + ++ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; ++ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; ++ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; ++ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; + +- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader; + +@end diff --git a/GPUImage/Source/GPUImageGaussianBlurFilter.m b/GPUImage/Source/GPUImageGaussianBlurFilter.m new file mode 100755 index 0000000..bee907f --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianBlurFilter.m @@ -0,0 +1,484 @@ +#import "GPUImageGaussianBlurFilter.h" + +@implementation GPUImageGaussianBlurFilter + +@synthesize texelSpacingMultiplier = _texelSpacingMultiplier; +@synthesize blurRadiusInPixels = _blurRadiusInPixels; +@synthesize blurRadiusAsFractionOfImageWidth = _blurRadiusAsFractionOfImageWidth; +@synthesize blurRadiusAsFractionOfImageHeight = _blurRadiusAsFractionOfImageHeight; +@synthesize blurPasses = _blurPasses; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString +{ + if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString])) + { + return nil; + } + + self.texelSpacingMultiplier = 1.0; + _blurRadiusInPixels = 2.0; + shouldResizeBlurRadiusWithImageSize = NO; + + return self; +} + +- (id)init; +{ + NSString *currentGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:2.0]; + NSString *currentGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:2.0]; + + return [self initWithFirstStageVertexShaderFromString:currentGaussianBlurVertexShader firstStageFragmentShaderFromString:currentGaussianBlurFragmentShader secondStageVertexShaderFromString:currentGaussianBlurVertexShader secondStageFragmentShaderFromString:currentGaussianBlurFragmentShader]; +} + +#pragma mark - +#pragma mark Auto-generation of optimized Gaussian shaders + +// "Implementation limit of 32 varying components exceeded" - Max number of varyings for these GPUs + ++ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return 
kGPUImageVertexShaderString; + } + +// NSLog(@"Max varyings: %d", [GPUImageContext maximumVaryingVectorsForThisDevice]); + NSMutableString *shaderString = [[NSMutableString alloc] init]; + + // Header + [shaderString appendFormat:@"\ + attribute vec4 position;\n\ + attribute vec4 inputTextureCoordinate;\n\ + \n\ + uniform float texelWidthOffset;\n\ + uniform float texelHeightOffset;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + gl_Position = position;\n\ + \n\ + vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(blurRadius * 2 + 1) ]; + + // Inner offset loop + for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++) + { + NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius; + if (offsetFromCenter < 0) + { + [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(-offsetFromCenter)]; + } + else if (offsetFromCenter > 0) + { + [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy + singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(offsetFromCenter)]; + } + else + { + [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy;\n", (unsigned long)currentBlurCoordinateIndex]; + } + } + + // Footer + [shaderString appendString:@"}\n"]; + + return shaderString; +} + ++ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImagePassthroughFragmentShaderString; + } + + // First, generate the normal Gaussian weights for a given sigma + GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat)); + GLfloat sumOfWeights = 0.0; + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0))); + + if (currentGaussianWeightIndex == 0) + { + sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex]; + } + else + { + sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; + } + } + + // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } + + // Finally, generate the shader from these weights + NSMutableString *shaderString = [[NSMutableString alloc] init]; + + // Header +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + \n\ + varying highp vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + lowp vec4 sum = vec4(0.0);\n", (unsigned long)(blurRadius * 2 + 1) ]; +#else + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + vec4 sum = vec4(0.0);\n", (blurRadius * 2 + 1) ]; +#endif + + // Inner texture loop + for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); 
currentBlurCoordinateIndex++) + { + NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius; + if (offsetFromCenter < 0) + { + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[-offsetFromCenter]]; + } + else + { + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[offsetFromCenter]]; + } + } + + // Footer + [shaderString appendString:@"\ + gl_FragColor = sum;\n\ + }\n"]; + + free(standardGaussianWeights); + return shaderString; +} + ++ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImageVertexShaderString; + } + + // First, generate the normal Gaussian weights for a given sigma + GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat)); + GLfloat sumOfWeights = 0.0; + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0))); + + if (currentGaussianWeightIndex == 0) + { + sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex]; + } + else + { + sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; + } + } + + // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } + + // From these weights we calculate the offsets to read interpolated values from + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat)); + + for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++) + { + GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2]; + + GLfloat optimizedWeight = firstWeight + secondWeight; + + optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight; + } + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + // Header + [shaderString appendFormat:@"\ + attribute vec4 position;\n\ + attribute vec4 inputTextureCoordinate;\n\ + \n\ + uniform float texelWidthOffset;\n\ + uniform float texelHeightOffset;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + gl_Position = position;\n\ + \n\ + vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))]; + + // Inner offset loop + [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"]; + for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++) + { + [shaderString appendFormat:@"\ + blurCoordinates[%lu] = inputTextureCoordinate.xy + 
singleStepOffset * %f;\n\ + blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]]; + } + + // Footer + [shaderString appendString:@"}\n"]; + + free(optimizedGaussianOffsets); + free(standardGaussianWeights); + return shaderString; +} + ++ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImagePassthroughFragmentShaderString; + } + + // First, generate the normal Gaussian weights for a given sigma + GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat)); + GLfloat sumOfWeights = 0.0; + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0))); + + if (currentGaussianWeightIndex == 0) + { + sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex]; + } + else + { + sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; + } + } + + // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } + + // From these weights we calculate the offsets to read interpolated values from + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2); + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + + // Header +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + uniform highp float texelWidthOffset;\n\ + uniform highp float texelHeightOffset;\n\ + \n\ + varying highp vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ]; +#else + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + uniform float texelWidthOffset;\n\ + uniform float texelHeightOffset;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + vec4 sum = vec4(0.0);\n", 1 + (numberOfOptimizedOffsets * 2) ]; +#endif + + // Inner texture loop + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", standardGaussianWeights[0]]; + + for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++) + { + GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2]; + GLfloat optimizedWeight = firstWeight + secondWeight; + + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", 
(unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight]; + } + + // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader + if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#else + [shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#endif + + for (NSUInteger currentOverflowTextureRead = numberOfOptimizedOffsets; currentOverflowTextureRead < trueNumberOfOptimizedOffsets; currentOverflowTextureRead++) + { + GLfloat firstWeight = standardGaussianWeights[currentOverflowTextureRead * 2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentOverflowTextureRead * 2 + 2]; + + GLfloat optimizedWeight = firstWeight + secondWeight; + GLfloat optimizedOffset = (firstWeight * (currentOverflowTextureRead * 2 + 1) + secondWeight * (currentOverflowTextureRead * 2 + 2)) / optimizedWeight; + + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight]; + } + } + + // Footer + [shaderString appendString:@"\ + gl_FragColor = sum;\n\ + }\n"]; + + free(standardGaussianWeights); + return shaderString; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + [super setupFilterForSize:filterFrameSize]; + + if (shouldResizeBlurRadiusWithImageSize == YES) + { + // Rescaling the blur radius to track the new image size would happen here; it is not yet implemented. + } +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture]; + + for (NSUInteger currentAdditionalBlurPass = 1; currentAdditionalBlurPass < _blurPasses; currentAdditionalBlurPass++) + { + [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation] sourceTexture:secondFilterOutputTexture]; + } +} + +- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader]; + + if (!filterProgram.initialized) + { + [self initializeAttributes]; + + if (![filterProgram link]) + { + NSString *progLog = [filterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [filterProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [filterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + filterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + filterPositionAttribute = [filterProgram attributeIndex:@"position"]; + filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"]; + filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader + 
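// Both blur passes are rebuilt from the same shader source in this method; the first (vertical) and second (horizontal) passes differ only in the texelWidthOffset / texelHeightOffset values supplied through these uniforms at render time. +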
verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"]; + verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"]; + [GPUImageContext setActiveShaderProgram:filterProgram]; + + glEnableVertexAttribArray(filterPositionAttribute); + glEnableVertexAttribArray(filterTextureCoordinateAttribute); + + secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader]; + + if (!secondFilterProgram.initialized) + { + [self initializeSecondaryAttributes]; + + if (![secondFilterProgram link]) + { + NSString *progLog = [secondFilterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [secondFilterProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [secondFilterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + secondFilterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"]; + secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"]; + secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader + secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader + horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"]; + horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"]; + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + + glEnableVertexAttribArray(secondFilterPositionAttribute); + glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute); + + [self setupFilterForSize:[self sizeOfFBO]]; + glFinish(); + }); + +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTexelSpacingMultiplier:(CGFloat)newValue; +{ + _texelSpacingMultiplier = newValue; + + _verticalTexelSpacing = _texelSpacingMultiplier; + _horizontalTexelSpacing = _texelSpacingMultiplier; + + [self setupFilterForSize:[self sizeOfFBO]]; +} + +// inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + // 7.0 is the limit for blur size for hardcoded varying offsets + + if (round(newValue) != _blurRadiusInPixels) + { + _blurRadiusInPixels = round(newValue); // For now, only do integral sigmas + + // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel + CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0; + NSUInteger calculatedSampleRadius = floor(sqrt(-2.0 * pow(_blurRadiusInPixels, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(_blurRadiusInPixels, 2.0))) )); + calculatedSampleRadius += calculatedSampleRadius % 2; // There's nothing to gain from handling odd radius sizes, due to the optimizations I use + +// NSLog(@"Blur radius: %f, calculated sample radius: %d", _blurRadiusInPixels, calculatedSampleRadius); +// + NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels]; + NSString 
*newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels]; + +// NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader); +// NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader); +// + [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader]; + } + shouldResizeBlurRadiusWithImageSize = NO; +} + +@end diff --git a/GPUImage/Source/GPUImageGaussianBlurPositionFilter.h b/GPUImage/Source/GPUImageGaussianBlurPositionFilter.h new file mode 100755 index 0000000..dc88a56 --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianBlurPositionFilter.h @@ -0,0 +1,22 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +/** A more generalized 9x9 Gaussian blur filter + */ +@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter +{ + GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform; +} + +/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0 + */ +@property (readwrite, nonatomic) CGFloat blurSize; + +/** Center for the blur, defaults to 0.5, 0.5 + */ +@property (readwrite, nonatomic) CGPoint blurCenter; + +/** Radius for the blur, defaults to 1.0 + */ +@property (readwrite, nonatomic) CGFloat blurRadius; + +@end diff --git a/GPUImage/Source/GPUImageGaussianBlurPositionFilter.m b/GPUImage/Source/GPUImageGaussianBlurPositionFilter.m new file mode 100755 index 0000000..8ecd924 --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianBlurPositionFilter.m @@ -0,0 +1,232 @@ +#import "GPUImageGaussianBlurPositionFilter.h" + +NSString *const kGPUImageGaussianBlurPositionVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + const int GAUSSIAN_SAMPLES = 9; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + varying vec2 textureCoordinate; + varying vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + void main() + { + gl_Position = position; + textureCoordinate = inputTextureCoordinate.xy; + + // Calculate the positions for the blur + int multiplier = 0; + vec2 blurStep; + vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset); + + for (int i = 0; i < GAUSSIAN_SAMPLES; i++) { + multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2)); + // Blur in x (horizontal) + blurStep = float(multiplier) * singleStepOffset; + blurCoordinates[i] = inputTextureCoordinate.xy + blurStep; + } + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + const lowp int GAUSSIAN_SAMPLES = 9; + + varying highp vec2 textureCoordinate; + varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + uniform highp float aspectRatio; + uniform lowp vec2 blurCenter; + uniform highp float blurRadius; + + void main() { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float dist = distance(blurCenter, textureCoordinateToUse); + + if (dist < blurRadius) + { + lowp vec4 sum = vec4(0.0); + + sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05; + sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09; + sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12; + sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15; + sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18; + sum += texture2D(inputImageTexture, 
blurCoordinates[5]) * 0.15; + sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12; + sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09; + sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05; + + gl_FragColor = sum; + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } + } +); +#else +NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + const int GAUSSIAN_SAMPLES = 9; + + varying vec2 textureCoordinate; + varying vec2 blurCoordinates[GAUSSIAN_SAMPLES]; + + uniform float aspectRatio; + uniform vec2 blurCenter; + uniform float blurRadius; + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float dist = distance(blurCenter, textureCoordinateToUse); + + if (dist < blurRadius) + { + vec4 sum = vec4(0.0); + + sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05; + sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09; + sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12; + sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15; + sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18; + sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15; + sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12; + sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09; + sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05; + + gl_FragColor = sum; + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate); + } + } +); +#endif + +@interface GPUImageGaussianBlurPositionFilter () + +- (void)adjustAspectRatio; + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end + +@implementation GPUImageGaussianBlurPositionFilter + +@synthesize blurSize = _blurSize; +@synthesize blurCenter = _blurCenter; +@synthesize aspectRatio = _aspectRatio; + +- (id) initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString + firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString + secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString + secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString { + + if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString ? firstStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString + firstStageFragmentShaderFromString:firstStageFragmentShaderString ? firstStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString + secondStageVertexShaderFromString:secondStageVertexShaderString ? secondStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString + secondStageFragmentShaderFromString:secondStageFragmentShaderString ? 
secondStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString])) { + return nil; + } + + aspectRatioUniform = [secondFilterProgram uniformIndex:@"aspectRatio"]; + blurCenterUniform = [secondFilterProgram uniformIndex:@"blurCenter"]; + blurRadiusUniform = [secondFilterProgram uniformIndex:@"blurRadius"]; + + self.blurSize = 1.0; + self.blurRadius = 1.0; + self.blurCenter = CGPointMake(0.5, 0.5); + + return self; +} + +- (id)init; +{ + return [self initWithFirstStageVertexShaderFromString:nil + firstStageFragmentShaderFromString:nil + secondStageVertexShaderFromString:nil + secondStageFragmentShaderFromString:nil]; +} + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setBlurCenter:self.blurCenter]; + [self adjustAspectRatio]; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurSize:(CGFloat)newValue; +{ + _blurSize = newValue; + + _verticalTexelSpacing = _blurSize; + _horizontalTexelSpacing = _blurSize; + + [self setupFilterForSize:[self sizeOfFBO]]; +} + +- (void) setBlurCenter:(CGPoint)blurCenter; +{ + _blurCenter = blurCenter; + CGPoint rotatedPoint = [self rotatedPoint:blurCenter forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:blurCenterUniform program:secondFilterProgram]; +} + +- (void) setBlurRadius:(CGFloat)blurRadius; +{ + _blurRadius = blurRadius; + + [self setFloat:_blurRadius forUniform:blurRadiusUniform program:secondFilterProgram]; +} + +- (void) setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:secondFilterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.h b/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.h new file mode 100755 index 0000000..0232456 --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.h @@ -0,0 +1,30 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGaussianBlurFilter; + +/** A Gaussian blur that preserves focus within a circular region + */ +@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup +{ + GPUImageGaussianBlurFilter *blurFilter; + GPUImageFilter *selectiveFocusFilter; + BOOL hasOverriddenAspectRatio; +} + +/** The radius of the circular area being excluded from the blur + */ +@property (readwrite, nonatomic) CGFloat excludeCircleRadius; +/** The center of the circular area being excluded from the blur + */ +@property (readwrite, nonatomic) CGPoint excludeCirclePoint; +/** The size of the area between the blurred portion and the clear circle + */ +@property (readwrite, nonatomic) CGFloat excludeBlurSize; +/** A radius in pixels to use for the blur, with a default of 5.0. 
This adjusts the sigma variable in the Gaussian distribution function. + */ +@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; +/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value. + */ +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end diff --git a/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.m b/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.m new file mode 100755 index 0000000..7ebc9e1 --- /dev/null +++ b/GPUImage/Source/GPUImageGaussianSelectiveBlurFilter.m @@ -0,0 +1,147 @@ +#import "GPUImageGaussianSelectiveBlurFilter.h" +#import "GPUImageGaussianBlurFilter.h" +#import "GPUImageTwoInputFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform lowp float excludeCircleRadius; + uniform lowp vec2 excludeCirclePoint; + uniform lowp float excludeBlurSize; + uniform highp float aspectRatio; + + void main() + { + lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2); + + highp vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse); + + gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter)); + } +); +#else +NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float excludeCircleRadius; + uniform vec2 excludeCirclePoint; + uniform float excludeBlurSize; + uniform float aspectRatio; + + void main() + { + vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2); + + vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse); + + gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter)); + } +); +#endif + +@implementation GPUImageGaussianSelectiveBlurFilter + +@synthesize excludeCirclePoint = _excludeCirclePoint, excludeCircleRadius = _excludeCircleRadius, excludeBlurSize = _excludeBlurSize; +@synthesize blurRadiusInPixels = _blurRadiusInPixels; +@synthesize aspectRatio = _aspectRatio; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + hasOverriddenAspectRatio = NO; + + // First pass: apply a variable Gaussian blur + blurFilter = [[GPUImageGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + + // Second pass: combine the blurred image with the original sharp one + selectiveFocusFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageGaussianSelectiveBlurFragmentShaderString]; + [self addFilter:selectiveFocusFilter]; + + // Texture location 0 needs to be the sharp 
image for both the blur and the second stage processing + [blurFilter addTarget:selectiveFocusFilter atTextureLocation:1]; + + // To prevent double updating of this filter, disable updates from the sharp image side + self.initialFilters = [NSArray arrayWithObjects:blurFilter, selectiveFocusFilter, nil]; + self.terminalFilter = selectiveFocusFilter; + + self.blurRadiusInPixels = 5.0; + + self.excludeCircleRadius = 60.0/320.0; + self.excludeCirclePoint = CGPointMake(0.5f, 0.5f); + self.excludeBlurSize = 30.0/320.0; + + return self; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + inputTextureSize = newSize; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!hasOverriddenAspectRatio) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + _aspectRatio = (inputTextureSize.width / inputTextureSize.height); + [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"]; + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setExcludeCirclePoint:(CGPoint)newValue; +{ + _excludeCirclePoint = newValue; + [selectiveFocusFilter setPoint:newValue forUniformName:@"excludeCirclePoint"]; +} + +- (void)setExcludeCircleRadius:(CGFloat)newValue; +{ + _excludeCircleRadius = newValue; + [selectiveFocusFilter setFloat:newValue forUniformName:@"excludeCircleRadius"]; +} + +- (void)setExcludeBlurSize:(CGFloat)newValue; +{ + _excludeBlurSize = newValue; + [selectiveFocusFilter setFloat:newValue forUniformName:@"excludeBlurSize"]; +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + hasOverriddenAspectRatio = YES; + _aspectRatio = newValue; + [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"]; +} + +@end diff --git a/GPUImage/Source/GPUImageGlassSphereFilter.h b/GPUImage/Source/GPUImageGlassSphereFilter.h new file mode 100644 index 0000000..809a4ee --- /dev/null +++ b/GPUImage/Source/GPUImageGlassSphereFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageSphereRefractionFilter.h" + +@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter + +@end diff --git a/GPUImage/Source/GPUImageGlassSphereFilter.m b/GPUImage/Source/GPUImageGlassSphereFilter.m new file mode 100644 index 0000000..1866291 --- /dev/null +++ b/GPUImage/Source/GPUImageGlassSphereFilter.m @@ -0,0 +1,106 @@ +#import "GPUImageGlassSphereFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 center; + uniform highp float radius; + uniform highp float aspectRatio; + uniform highp float refractiveIndex; +// uniform vec3 lightPosition; + const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0); + const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0); + + void main() + { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float distanceFromCenter = distance(center, textureCoordinateToUse); + lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius); + + distanceFromCenter = distanceFromCenter / radius; + + highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); + highp vec3 
sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); + + highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex); + refractedVector.xy = -refractedVector.xy; + + highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb; + + // Grazing angle lighting + highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25)); + finalSphereColor += lightingIntensity; + + // Specular lighting + lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0); + lightingIntensity = pow(lightingIntensity, 15.0); + finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity; + + gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere; + } +); +#else +NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform vec2 center; + uniform float radius; + uniform float aspectRatio; + uniform float refractiveIndex; + // uniform vec3 lightPosition; + const vec3 lightPosition = vec3(-0.5, 0.5, 1.0); + const vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0); + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float distanceFromCenter = distance(center, textureCoordinateToUse); + float checkForPresenceWithinSphere = step(distanceFromCenter, radius); + + distanceFromCenter = distanceFromCenter / radius; + + float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); + vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); + + vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex); + refractedVector.xy = -refractedVector.xy; + + vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb; + + // Grazing angle lighting + float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25)); + finalSphereColor += lightingIntensity; + + // Specular lighting + lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0); + lightingIntensity = pow(lightingIntensity, 15.0); + finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity; + + gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere; + } +); +#endif + +@implementation GPUImageGlassSphereFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageGlassSphereFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageGrayscaleFilter.h b/GPUImage/Source/GPUImageGrayscaleFilter.h new file mode 100755 index 0000000..2d97f8c --- /dev/null +++ b/GPUImage/Source/GPUImageGrayscaleFilter.h @@ -0,0 +1,9 @@ +#import "GPUImageFilter.h" + +extern NSString *const kGPUImageLuminanceFragmentShaderString; + +/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution) + */ +@interface GPUImageGrayscaleFilter : GPUImageFilter + +@end diff --git a/GPUImage/Source/GPUImageGrayscaleFilter.m b/GPUImage/Source/GPUImageGrayscaleFilter.m new file mode 100755 index 0000000..0cdafad --- /dev/null +++ b/GPUImage/Source/GPUImageGrayscaleFilter.m @@ -0,0 +1,140 @@ +#import 
"GPUImageGrayscaleFilter.h" + +@implementation GPUImageGrayscaleFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, W); + + gl_FragColor = vec4(vec3(luminance), textureColor.a); + } +); +#else +NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, W); + + gl_FragColor = vec4(vec3(luminance), textureColor.a); + } +); +#endif + + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (!currentlyReceivingMonochromeInput) + { + [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture]; + } +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + [super setInputTexture:newInputTexture atIndex:textureIndex]; + if (currentlyReceivingMonochromeInput) + { + [self notifyTargetsAboutNewOutputTexture]; + } +} + +- (GLuint)textureForOutput; +{ + if (currentlyReceivingMonochromeInput) + { + return filterSourceTexture; + } + else + { + return outputTexture; + } +} + +- (BOOL)wantsMonochromeInput; +{ + return YES; +} + +- (BOOL)providesMonochromeOutput; +{ + return YES; +} + +- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime; +{ + if (self.frameProcessingCompletionBlock != NULL) + { + self.frameProcessingCompletionBlock(self, frameTime); + } + + [self releaseInputTexturesIfNeeded]; + + for (id currentTarget in targets) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + [self setInputTextureForTarget:currentTarget atIndex:textureIndex]; + } + + if (currentlyReceivingMonochromeInput) + { + [currentTarget setInputRotation:inputRotation atIndex:textureIndex]; + + CGSize sizeToRotate = [self outputFrameSize]; + CGSize rotatedSize = sizeToRotate; + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + rotatedSize.width = sizeToRotate.height; + rotatedSize.height = sizeToRotate.width; + } + [currentTarget setInputSize:rotatedSize atIndex:textureIndex]; + } + else + { + [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex]; + } + [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex]; + } + } +} + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString])) + { + return nil; + } + + return self; +} + + +@end diff --git a/GPUImage/Source/GPUImageHSBFilter.h b/GPUImage/Source/GPUImageHSBFilter.h new file mode 100644 index 0000000..65a9e1d --- /dev/null +++ b/GPUImage/Source/GPUImageHSBFilter.h @@ -0,0 +1,27 @@ +#import "GPUImageColorMatrixFilter.h" + +@interface 
GPUImageHSBFilter : GPUImageColorMatrixFilter + +/** Reset the filter to have no transformations. + */ +- (void)reset; + +/** Add a hue rotation to the filter. + The hue rotation is in the range [-360, 360] with 0 being no-change. + Note that this adjustment is additive, so use the reset method if you need to. + */ +- (void)rotateHue:(float)h; + +/** Add a saturation adjustment to the filter. + The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change. + Note that this adjustment is additive, so use the reset method if you need to. + */ +- (void)adjustSaturation:(float)s; + +/** Add a brightness adjustment to the filter. + The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change. + Note that this adjustment is additive, so use the reset method if you need to. + */ +- (void)adjustBrightness:(float)b; + +@end diff --git a/GPUImage/Source/GPUImageHSBFilter.m b/GPUImage/Source/GPUImageHSBFilter.m new file mode 100644 index 0000000..eb668f7 --- /dev/null +++ b/GPUImage/Source/GPUImageHSBFilter.m @@ -0,0 +1,414 @@ +#import "GPUImageHSBFilter.h" + +@implementation GPUImageHSBFilter { + float matrix[4][4]; +} + +- (id)init +{ + self = [super init]; + if (self) { + [self reset]; + } + return self; +} + +- (void)reset { + identmat(matrix); + [self _updateColorMatrix]; +} + +- (void)rotateHue:(float)h { + huerotatemat(matrix, h); + [self _updateColorMatrix]; +} + +- (void)adjustSaturation:(float)s { + saturatemat(matrix, s); + [self _updateColorMatrix]; +} + +- (void)adjustBrightness:(float)b { + cscalemat(matrix, b, b, b); + [self _updateColorMatrix]; +} + +- (void)_updateColorMatrix { + GPUMatrix4x4 gpuMatrix; + gpuMatrix.one.one = matrix[0][0]; + gpuMatrix.one.two = matrix[1][0]; + gpuMatrix.one.three = matrix[2][0]; + gpuMatrix.one.four = matrix[3][0]; + gpuMatrix.two.one = matrix[0][1]; + gpuMatrix.two.two = matrix[1][1]; + gpuMatrix.two.three = matrix[2][1]; + gpuMatrix.two.four = matrix[3][1]; + gpuMatrix.three.one = matrix[0][2]; + gpuMatrix.three.two = matrix[1][2]; + gpuMatrix.three.three = matrix[2][2]; + gpuMatrix.three.four = matrix[3][2]; + gpuMatrix.four.one = matrix[0][3]; + gpuMatrix.four.two = matrix[1][3]; + gpuMatrix.four.three = matrix[2][3]; + gpuMatrix.four.four = matrix[3][3]; + self.colorMatrix = gpuMatrix; +} + +#pragma mark - Matrix algorithms + +/* Matrix algorithms adapted from http://www.graficaobscura.com/matrix/index.html + + Note about luminance vector values below from that page: + Where rwgt is 0.3086, gwgt is 0.6094, and bwgt is 0.0820. This is the luminance vector. Notice here that we do not use the standard NTSC weights of 0.299, 0.587, and 0.114. The NTSC weights are only applicable to RGB colors in a gamma 2.2 color space. For linear RGB colors the values above are better. 
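+ + As a rough worked check of how a luminance vector enters these transforms (derived from saturatemat below): for a saturation factor sat, each output channel is a mix of the luminance-weighted grey value and the original channel, e.g. + + R' = (1.0 - sat) * (rwgt*R + gwgt*G + bwgt*B) + sat * R + + so sat = 0.0 collapses the image to its grey (luminance) value and sat = 1.0 leaves it unchanged.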
+ */ +//#define RLUM (0.3086f) +//#define GLUM (0.6094f) +//#define BLUM (0.0820f) + +/* This is the vector value from the PDF specification, and may be closer to what Photoshop uses */ +#define RLUM (0.3f) +#define GLUM (0.59f) +#define BLUM (0.11f) + +/* + * matrixmult - + * multiply two matrices + */ +static void matrixmult(a,b,c) +float a[4][4], b[4][4], c[4][4]; +{ + int x, y; + float temp[4][4]; + + for(y=0; y<4 ; y++) + for(x=0 ; x<4 ; x++) { + temp[y][x] = b[y][0] * a[0][x] + + b[y][1] * a[1][x] + + b[y][2] * a[2][x] + + b[y][3] * a[3][x]; + } + for(y=0; y<4; y++) + for(x=0; x<4; x++) + c[y][x] = temp[y][x]; +} + +/* + * identmat - + * make an identity matrix + */ +static void identmat(matrix) +float matrix[4][4]; +{ + memset(matrix, 0, sizeof(float[4][4])); + matrix[0][0] = 1.0f; + matrix[1][1] = 1.0f; + matrix[2][2] = 1.0f; + matrix[3][3] = 1.0f; +} + +/* + * xformpnt - + * transform a 3D point using a matrix + */ +static void xformpnt(matrix,x,y,z,tx,ty,tz) +float matrix[4][4]; +float x,y,z; +float *tx,*ty,*tz; +{ + *tx = x*matrix[0][0] + y*matrix[1][0] + z*matrix[2][0] + matrix[3][0]; + *ty = x*matrix[0][1] + y*matrix[1][1] + z*matrix[2][1] + matrix[3][1]; + *tz = x*matrix[0][2] + y*matrix[1][2] + z*matrix[2][2] + matrix[3][2]; +} + +/* + * cscalemat - + * make a color scale matrix + */ +static void cscalemat(mat,rscale,gscale,bscale) +float mat[4][4]; +float rscale, gscale, bscale; +{ + float mmat[4][4]; + + mmat[0][0] = rscale; + mmat[0][1] = 0.0; + mmat[0][2] = 0.0; + mmat[0][3] = 0.0; + + mmat[1][0] = 0.0; + mmat[1][1] = gscale; + mmat[1][2] = 0.0; + mmat[1][3] = 0.0; + + mmat[2][0] = 0.0; + mmat[2][1] = 0.0; + mmat[2][2] = bscale; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; + mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * saturatemat - + * make a saturation matrix + */ +static void saturatemat(mat,sat) +float mat[4][4]; +float sat; +{ + float mmat[4][4]; + float a, b, c, d, e, f, g, h, i; + float rwgt, gwgt, bwgt; + + rwgt = RLUM; + gwgt = GLUM; + bwgt = BLUM; + + a = (1.0-sat)*rwgt + sat; + b = (1.0-sat)*rwgt; + c = (1.0-sat)*rwgt; + d = (1.0-sat)*gwgt; + e = (1.0-sat)*gwgt + sat; + f = (1.0-sat)*gwgt; + g = (1.0-sat)*bwgt; + h = (1.0-sat)*bwgt; + i = (1.0-sat)*bwgt + sat; + mmat[0][0] = a; + mmat[0][1] = b; + mmat[0][2] = c; + mmat[0][3] = 0.0; + + mmat[1][0] = d; + mmat[1][1] = e; + mmat[1][2] = f; + mmat[1][3] = 0.0; + + mmat[2][0] = g; + mmat[2][1] = h; + mmat[2][2] = i; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; + mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * xrotate - + * rotate about the x (red) axis + */ +static void xrotatemat(mat,rs,rc) +float mat[4][4]; +float rs, rc; +{ + float mmat[4][4]; + + mmat[0][0] = 1.0; + mmat[0][1] = 0.0; + mmat[0][2] = 0.0; + mmat[0][3] = 0.0; + + mmat[1][0] = 0.0; + mmat[1][1] = rc; + mmat[1][2] = rs; + mmat[1][3] = 0.0; + + mmat[2][0] = 0.0; + mmat[2][1] = -rs; + mmat[2][2] = rc; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; + mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * yrotate - + * rotate about the y (green) axis + */ +static void yrotatemat(mat,rs,rc) +float mat[4][4]; +float rs, rc; +{ + float mmat[4][4]; + + mmat[0][0] = rc; + mmat[0][1] = 0.0; + mmat[0][2] = -rs; + mmat[0][3] = 0.0; + + mmat[1][0] = 0.0; + mmat[1][1] = 1.0; + mmat[1][2] = 0.0; + mmat[1][3] = 0.0; + + mmat[2][0] = rs; + mmat[2][1] = 0.0; + mmat[2][2] = rc; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; +
mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * zrotate - + * rotate about the z (blue) axis + */ +static void zrotatemat(mat,rs,rc) +float mat[4][4]; +float rs, rc; +{ + float mmat[4][4]; + + mmat[0][0] = rc; + mmat[0][1] = rs; + mmat[0][2] = 0.0; + mmat[0][3] = 0.0; + + mmat[1][0] = -rs; + mmat[1][1] = rc; + mmat[1][2] = 0.0; + mmat[1][3] = 0.0; + + mmat[2][0] = 0.0; + mmat[2][1] = 0.0; + mmat[2][2] = 1.0; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; + mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * zshear - + * shear z using x and y. + */ +static void zshearmat(mat,dx,dy) +float mat[4][4]; +float dx, dy; +{ + float mmat[4][4]; + + mmat[0][0] = 1.0; + mmat[0][1] = 0.0; + mmat[0][2] = dx; + mmat[0][3] = 0.0; + + mmat[1][0] = 0.0; + mmat[1][1] = 1.0; + mmat[1][2] = dy; + mmat[1][3] = 0.0; + + mmat[2][0] = 0.0; + mmat[2][1] = 0.0; + mmat[2][2] = 1.0; + mmat[2][3] = 0.0; + + mmat[3][0] = 0.0; + mmat[3][1] = 0.0; + mmat[3][2] = 0.0; + mmat[3][3] = 1.0; + matrixmult(mmat,mat,mat); +} + +/* + * simplehuerotatemat - + * simple hue rotation. This changes luminance + */ +//static void simplehuerotatemat(mat,rot) +//float mat[4][4]; +//float rot; +//{ +// float mag; +// float xrs, xrc; +// float yrs, yrc; +// float zrs, zrc; +// +// /* rotate the grey vector into positive Z */ +// mag = sqrt(2.0); +// xrs = 1.0/mag; +// xrc = 1.0/mag; +// xrotatemat(mat,xrs,xrc); +// +// mag = sqrt(3.0); +// yrs = -1.0/mag; +// yrc = sqrt(2.0)/mag; +// yrotatemat(mat,yrs,yrc); +// +// /* rotate the hue */ +// zrs = sin(rot*M_PI/180.0); +// zrc = cos(rot*M_PI/180.0); +// zrotatemat(mat,zrs,zrc); +// +// /* rotate the grey vector back into place */ +// yrotatemat(mat,-yrs,yrc); +// xrotatemat(mat,-xrs,xrc); +//} + +/* + * huerotatemat - + * rotate the hue, while maintaining luminance. 
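+ * + * Why this preserves luminance, roughly: the grey diagonal (1,1,1) is first rotated onto the +Z axis (xrotatemat, yrotatemat), the luminance plane is then sheared horizontal (zshearmat, using the luminance vector transformed by xformpnt), so the hue rotation about Z (zrotatemat) cannot move colors between luminance levels; the shear and rotations are undone afterwards.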
+ */ +static void huerotatemat(mat,rot) +float mat[4][4]; +float rot; +{ + float mmat[4][4]; + float mag; + float lx, ly, lz; + float xrs, xrc; + float yrs, yrc; + float zrs, zrc; + float zsx, zsy; + + identmat(mmat); + + /* rotate the grey vector into positive Z */ + mag = sqrt(2.0); + xrs = 1.0/mag; + xrc = 1.0/mag; + xrotatemat(mmat,xrs,xrc); + mag = sqrt(3.0); + yrs = -1.0/mag; + yrc = sqrt(2.0)/mag; + yrotatemat(mmat,yrs,yrc); + + /* shear the space to make the luminance plane horizontal */ + xformpnt(mmat,RLUM,GLUM,BLUM,&lx,&ly,&lz); + zsx = lx/lz; + zsy = ly/lz; + zshearmat(mmat,zsx,zsy); + + /* rotate the hue */ + zrs = sin(rot*M_PI/180.0); + zrc = cos(rot*M_PI/180.0); + zrotatemat(mmat,zrs,zrc); + + /* unshear the space to put the luminance plane back */ + zshearmat(mmat,-zsx,-zsy); + + /* rotate the grey vector back into place */ + yrotatemat(mmat,-yrs,yrc); + xrotatemat(mmat,-xrs,xrc); + + matrixmult(mmat,mat,mat); +} + +@end diff --git a/GPUImage/Source/GPUImageHalftoneFilter.h b/GPUImage/Source/GPUImageHalftoneFilter.h new file mode 100644 index 0000000..1860bc9 --- /dev/null +++ b/GPUImage/Source/GPUImageHalftoneFilter.h @@ -0,0 +1,5 @@ +#import "GPUImagePixellateFilter.h" + +@interface GPUImageHalftoneFilter : GPUImagePixellateFilter + +@end diff --git a/GPUImage/Source/GPUImageHalftoneFilter.m b/GPUImage/Source/GPUImageHalftoneFilter.m new file mode 100644 index 0000000..1b621c6 --- /dev/null +++ b/GPUImage/Source/GPUImageHalftoneFilter.m @@ -0,0 +1,79 @@ +#import "GPUImageHalftoneFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float fractionalWidthOfPixel; + uniform highp float aspectRatio; + uniform highp float dotScaling; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse); + + lowp vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb; + highp float dotScaling = 1.0 - dot(sampledColor, W); + + lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling); + + gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0); + } +); +#else +NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float fractionalWidthOfPixel; + uniform float aspectRatio; + uniform float dotScaling; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float 
distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse); + + vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb; + float dotScaling = 1.0 - dot(sampledColor, W); + + float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling); + + gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0); + } +); +#endif + +@implementation GPUImageHalftoneFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageHalftoneFragmentShaderString])) + { + return nil; + } + + self.fractionalWidthOfAPixel = 0.01; + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageHardLightBlendFilter.h b/GPUImage/Source/GPUImageHardLightBlendFilter.h new file mode 100755 index 0000000..47d6260 --- /dev/null +++ b/GPUImage/Source/GPUImageHardLightBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageHardLightBlendFilter.m b/GPUImage/Source/GPUImageHardLightBlendFilter.m new file mode 100755 index 0000000..2896ce8 --- /dev/null +++ b/GPUImage/Source/GPUImageHardLightBlendFilter.m @@ -0,0 +1,99 @@ +#import "GPUImageHardLightBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + mediump vec4 base = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + highp float ra; + if (2.0 * overlay.r < overlay.a) { + ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } + + highp float ga; + if (2.0 * overlay.g < overlay.a) { + ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } + + highp float ba; + if (2.0 * overlay.b < overlay.a) { + ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } + + gl_FragColor = vec4(ra, ga, ba, 1.0); + } +); +#else +NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + float ra; + if (2.0 * overlay.r < overlay.a) { + ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } + + float ga; + if (2.0 * overlay.g < overlay.a) { + ga = 2.0 * overlay.g * 
base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } + + float ba; + if (2.0 * overlay.b < overlay.a) { + ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } + + gl_FragColor = vec4(ra, ga, ba, 1.0); + } +); +#endif + + +@implementation GPUImageHardLightBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageHardLightBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.h b/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.h new file mode 100755 index 0000000..1492b8b --- /dev/null +++ b/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.h @@ -0,0 +1,53 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGaussianBlurFilter; +@class GPUImageXYDerivativeFilter; +@class GPUImageGrayscaleFilter; +@class GPUImageGaussianBlurFilter; +@class GPUImageThresholdedNonMaximumSuppressionFilter; +@class GPUImageColorPackingFilter; + +//#define DEBUGFEATUREDETECTION + +/** Harris corner detector + + First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter) + + Second pass: blur the derivative (GPUImageGaussianBlurFilter) + + Third pass: apply the Harris corner detection calculation + + This is the Harris corner detector, as described in + C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988. + */ +@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup +{ + GPUImageXYDerivativeFilter *derivativeFilter; + GPUImageGaussianBlurFilter *blurFilter; + GPUImageFilter *harrisCornerDetectionFilter; + GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter; + GPUImageColorPackingFilter *colorPackingFilter; + GLfloat *cornersArray; + GLubyte *rawImagePixels; +} + +/** The radius of the underlying Gaussian blur. The default is 2.0. + */ +@property(readwrite, nonatomic) CGFloat blurRadiusInPixels; + +// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0. +@property(readwrite, nonatomic) CGFloat sensitivity; + +// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20. +@property(readwrite, nonatomic) CGFloat threshold; + +// This block is called on the detection of new corner points, usually on every processed frame. 
A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame +@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime); + +// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector +@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages; + +// Initialization and teardown +- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader; + +@end diff --git a/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.m b/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.m new file mode 100755 index 0000000..1a191fb --- /dev/null +++ b/GPUImage/Source/GPUImageHarrisCornerDetectionFilter.m @@ -0,0 +1,291 @@ +#import "GPUImageHarrisCornerDetectionFilter.h" +#import "GPUImageGaussianBlurFilter.h" +#import "GPUImageXYDerivativeFilter.h" +#import "GPUImageGrayscaleFilter.h" +#import "GPUImageThresholdedNonMaximumSuppressionFilter.h" +#import "GPUImageColorPackingFilter.h" +#import "GPUImageGaussianBlurFilter.h" + +@interface GPUImageHarrisCornerDetectionFilter() + +- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime; + +@end + +// This is the Harris corner detector, as described in +// C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988. + +@implementation GPUImageHarrisCornerDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float sensitivity; + + const mediump float harrisConstant = 0.04; + + void main() + { + mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + mediump float derivativeSum = derivativeElements.x + derivativeElements.y; + + mediump float zElement = (derivativeElements.z * 2.0) - 1.0; + + // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2 + mediump float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum; + + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#else +NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float sensitivity; + + const float harrisConstant = 0.04; + + void main() + { + vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + float derivativeSum = derivativeElements.x + derivativeElements.y; + + float zElement = (derivativeElements.z * 2.0) - 1.0; + + // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2 + float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum; + + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#endif + +@synthesize blurRadiusInPixels; +@synthesize cornersDetectedBlock; +@synthesize sensitivity = _sensitivity; +@synthesize threshold = _threshold; +@synthesize intermediateImages = _intermediateImages; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self 
initWithCornerDetectionFragmentShader:kGPUImageHarrisCornerDetectionFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader; +{ + if (!(self = [super init])) + { + return nil; + } + +#ifdef DEBUGFEATUREDETECTION + _intermediateImages = [[NSMutableArray alloc] init]; +#endif + + // First pass: reduce to luminance and take the derivative of the luminance texture + derivativeFilter = [[GPUImageXYDerivativeFilter alloc] init]; + [self addFilter:derivativeFilter]; + +#ifdef DEBUGFEATUREDETECTION + __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages; + __unsafe_unretained GPUImageFilter *weakFilter = derivativeFilter; + [derivativeFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + }]; +#endif + + // Second pass: blur the derivative + blurFilter = [[GPUImageGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + +#ifdef DEBUGFEATUREDETECTION + weakFilter = blurFilter; + [blurFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + }]; +#endif + + // Third pass: apply the Harris corner detection calculation + harrisCornerDetectionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:cornerDetectionFragmentShader]; + [self addFilter:harrisCornerDetectionFilter]; + +#ifdef DEBUGFEATUREDETECTION + weakFilter = harrisCornerDetectionFilter; + [harrisCornerDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + }]; +#endif + + // Fourth pass: apply non-maximum suppression and thresholding to find the local maxima + nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] init]; + [self addFilter:nonMaximumSuppressionFilter]; + + __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self; +#ifdef DEBUGFEATUREDETECTION + weakFilter = nonMaximumSuppressionFilter; + [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + + [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime]; + }]; +#else + [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) { + [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime]; + }]; +#endif + +// Sixth pass: compress the thresholded points into the RGBA channels +// colorPackingFilter = [[GPUImageColorPackingFilter alloc] init]; +// [self addFilter:colorPackingFilter]; +// +// +//#ifdef DEBUGFEATUREDETECTION +// __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self; +// weakFilter = colorPackingFilter; +// [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ +// NSLog(@"Triggered response from compaction filter"); +// +// UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; +// [weakIntermediateImages addObject:intermediateImage]; +// +// 
[weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime]; +// }]; +//#else +// __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self; +// [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) { +// [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime]; +// }]; +//#endif + + [derivativeFilter addTarget:blurFilter]; + [blurFilter addTarget:harrisCornerDetectionFilter]; + [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter]; +// [simpleThresholdFilter addTarget:colorPackingFilter]; + + self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil]; +// self.terminalFilter = colorPackingFilter; + self.terminalFilter = nonMaximumSuppressionFilter; + + self.blurRadiusInPixels = 2.0; + self.sensitivity = 5.0; + self.threshold = 0.20; + + return self; +} + +- (void)dealloc; +{ + free(rawImagePixels); + free(cornersArray); +} + +#pragma mark - +#pragma mark Corner extraction + +- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime; +{ + // we need a normal color texture for this filter + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + + NSUInteger numberOfCorners = 0; + CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize; + + unsigned int imageByteSize = imageSize.width * imageSize.height * 4; + + if (rawImagePixels == NULL) + { + rawImagePixels = (GLubyte *)malloc(imageByteSize); + cornersArray = calloc(512 * 2, sizeof(GLfloat)); + } + + glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + + CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + unsigned int imageWidth = imageSize.width * 4; + + unsigned int currentByte = 0; + unsigned int cornerStorageIndex = 0; + while (currentByte < imageByteSize) + { + GLubyte colorByte = rawImagePixels[currentByte]; + + if (colorByte > 0) + { + unsigned int xCoordinate = currentByte % imageWidth; + unsigned int yCoordinate = currentByte / imageWidth; + + cornersArray[cornerStorageIndex++] = (CGFloat)(xCoordinate / 4) / imageSize.width; + cornersArray[cornerStorageIndex++] = (CGFloat)(yCoordinate) / imageSize.height; + numberOfCorners++; + + numberOfCorners = MIN(numberOfCorners, 511); + cornerStorageIndex = MIN(cornerStorageIndex, 1021); + } + currentByte +=4; + } + + CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime); + + if (cornersDetectedBlock != NULL) + { + cornersDetectedBlock(cornersArray, numberOfCorners, frameTime); + } +} + +- (BOOL)wantsMonochromeInput; +{ + return YES; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setSensitivity:(CGFloat)newValue; +{ + _sensitivity = newValue; + [harrisCornerDetectionFilter setFloat:newValue forUniformName:@"sensitivity"]; +} + +- (void)setThreshold:(CGFloat)newValue; +{ + nonMaximumSuppressionFilter.threshold = newValue; +} + +- (CGFloat)threshold; +{ + return nonMaximumSuppressionFilter.threshold; +} + +@end diff --git a/GPUImage/Source/GPUImageHazeFilter.h b/GPUImage/Source/GPUImageHazeFilter.h new file mode 100755 index 
0000000..eb3fbca --- /dev/null +++ b/GPUImage/Source/GPUImageHazeFilter.h @@ -0,0 +1,29 @@ +#import "GPUImageFilter.h" + +/* + * The haze filter can be used to add or remove haze (similar to a UV filter) + * + * @author Alaric Cole + * @creationDate 03/10/12 + * + */ + +/** The haze filter can be used to add or remove haze + + This is similar to a UV filter + */ +@interface GPUImageHazeFilter : GPUImageFilter +{ + GLint distanceUniform; + GLint slopeUniform; +} + +/** Strength of the color applied. Default 0. Values between -.3 and .3 are best + */ +@property(readwrite, nonatomic) CGFloat distance; + +/** Amount of color change. Default 0. Values between -.3 and .3 are best + */ +@property(readwrite, nonatomic) CGFloat slope; + +@end diff --git a/GPUImage/Source/GPUImageHazeFilter.m b/GPUImage/Source/GPUImageHazeFilter.m new file mode 100755 index 0000000..f90fc22 --- /dev/null +++ b/GPUImage/Source/GPUImageHazeFilter.m @@ -0,0 +1,96 @@ +#import "GPUImageHazeFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform lowp float hazeDistance; + uniform highp float slope; + + void main() + { + //todo reconsider precision modifiers + highp vec4 color = vec4(1.0);//todo reimplement as a parameter + + highp float d = textureCoordinate.y * slope + hazeDistance; + + highp vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply + + c = (c - d * color) / (1.0 -d); + + gl_FragColor = c; //consider using premultiply(c); + } +); +#else +NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float hazeDistance; + uniform float slope; + + void main() + { + //todo reconsider precision modifiers + vec4 color = vec4(1.0);//todo reimplement as a parameter + + float d = textureCoordinate.y * slope + hazeDistance; + + vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply + + c = (c - d * color) / (1.0 -d); + + gl_FragColor = c; //consider using premultiply(c); + } +); +#endif + + + + +@implementation GPUImageHazeFilter + +@synthesize distance = _distance; +@synthesize slope = _slope; +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageHazeFragmentShaderString])) + { + return nil; + } + + distanceUniform = [filterProgram uniformIndex:@"hazeDistance"]; + slopeUniform = [filterProgram uniformIndex:@"slope"]; + + self.distance = 0.2; + self.slope = 0.0; + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setDistance:(CGFloat)newValue; +{ + _distance = newValue; + + [self setFloat:_distance forUniform:distanceUniform program:filterProgram]; +} + +- (void)setSlope:(CGFloat)newValue; +{ + _slope = newValue; + + [self setFloat:_slope forUniform:slopeUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageHighPassFilter.h b/GPUImage/Source/GPUImageHighPassFilter.h new file mode 100644 index 0000000..263d8df --- /dev/null +++ b/GPUImage/Source/GPUImageHighPassFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilterGroup.h" +#import "GPUImageLowPassFilter.h" +#import "GPUImageDifferenceBlendFilter.h" + +@interface GPUImageHighPassFilter : GPUImageFilterGroup +{ + GPUImageLowPassFilter *lowPassFilter; + GPUImageDifferenceBlendFilter *differenceBlendFilter; +} + +// 
This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5. +@property(readwrite, nonatomic) CGFloat filterStrength; + +@end diff --git a/GPUImage/Source/GPUImageHighPassFilter.m b/GPUImage/Source/GPUImageHighPassFilter.m new file mode 100644 index 0000000..511240d --- /dev/null +++ b/GPUImage/Source/GPUImageHighPassFilter.m @@ -0,0 +1,46 @@ +#import "GPUImageHighPassFilter.h" + +@implementation GPUImageHighPassFilter + +@synthesize filterStrength; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // Start with a low pass filter to define the component to be removed + lowPassFilter = [[GPUImageLowPassFilter alloc] init]; + [self addFilter:lowPassFilter]; + + // Take the difference of the current frame from the low pass filtered result to get the high pass + differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init]; + [self addFilter:differenceBlendFilter]; + + // Texture location 0 needs to be the original image for the difference blend + [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1]; + + self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil]; + self.terminalFilter = differenceBlendFilter; + + self.filterStrength = 0.5; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setFilterStrength:(CGFloat)newValue; +{ + lowPassFilter.filterStrength = newValue; +} + +- (CGFloat)filterStrength; +{ + return lowPassFilter.filterStrength; +} + +@end diff --git a/GPUImage/Source/GPUImageHighlightShadowFilter.h b/GPUImage/Source/GPUImageHighlightShadowFilter.h new file mode 100644 index 0000000..3579129 --- /dev/null +++ b/GPUImage/Source/GPUImageHighlightShadowFilter.h @@ -0,0 +1,20 @@ +#import "GPUImageFilter.h" + +@interface GPUImageHighlightShadowFilter : GPUImageFilter +{ + GLint shadowsUniform, highlightsUniform; +} + +/** + * 0 - 1, increase to lighten shadows. + * @default 0 + */ +@property(readwrite, nonatomic) CGFloat shadows; + +/** + * 0 - 1, decrease to darken highlights. 
+ * @default 1 + */ +@property(readwrite, nonatomic) CGFloat highlights; + +@end diff --git a/GPUImage/Source/GPUImageHighlightShadowFilter.m b/GPUImage/Source/GPUImageHighlightShadowFilter.m new file mode 100644 index 0000000..aa92b74 --- /dev/null +++ b/GPUImage/Source/GPUImageHighlightShadowFilter.m @@ -0,0 +1,93 @@ +#import "GPUImageHighlightShadowFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING +( +uniform sampler2D inputImageTexture; +varying highp vec2 textureCoordinate; + +uniform lowp float shadows; +uniform lowp float highlights; + +const mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3); + +void main() +{ + lowp vec4 source = texture2D(inputImageTexture, textureCoordinate); + mediump float luminance = dot(source.rgb, luminanceWeighting); + + mediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0); + mediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0); + lowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0)); + + gl_FragColor = vec4(result.rgb, source.a); +} +); +#else +NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + varying vec2 textureCoordinate; + + uniform float shadows; + uniform float highlights; + + const vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3); + + void main() + { + vec4 source = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(source.rgb, luminanceWeighting); + + float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0); + float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0); + vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0)); + + gl_FragColor = vec4(result.rgb, source.a); + } +); +#endif + +@implementation GPUImageHighlightShadowFilter + +@synthesize shadows = _shadows; +@synthesize highlights = _highlights; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageHighlightShadowFragmentShaderString])) + { + return nil; + } + + shadowsUniform = [filterProgram uniformIndex:@"shadows"]; + highlightsUniform = [filterProgram uniformIndex:@"highlights"]; + + self.shadows = 0.0; + self.highlights = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setShadows:(CGFloat)newValue; +{ + _shadows = newValue; + + [self setFloat:_shadows forUniform:shadowsUniform program:filterProgram]; +} + +- (void)setHighlights:(CGFloat)newValue; +{ + _highlights = newValue; + + [self setFloat:_highlights forUniform:highlightsUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageHistogramFilter.h b/GPUImage/Source/GPUImageHistogramFilter.h new file mode 100755 index 0000000..6016d5e --- /dev/null +++ b/GPUImage/Source/GPUImageHistogramFilter.h @@ -0,0 +1,25 @@ +#import "GPUImageFilter.h" + +typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType; + +@interface 
GPUImageHistogramFilter : GPUImageFilter +{ + GPUImageHistogramType histogramType; + + GLubyte *vertexSamplingCoordinates; + + GLProgram *secondFilterProgram, *thirdFilterProgram; + GLint secondFilterPositionAttribute, thirdFilterPositionAttribute; +} + +// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1. +@property(readwrite, nonatomic) NSUInteger downsamplingFactor; + +// Initialization and teardown +- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType; +- (void)initializeSecondaryAttributes; + +// Rendering +- (void)generatePointCoordinates; + +@end diff --git a/GPUImage/Source/GPUImageHistogramFilter.m b/GPUImage/Source/GPUImageHistogramFilter.m new file mode 100755 index 0000000..4201e75 --- /dev/null +++ b/GPUImage/Source/GPUImageHistogramFilter.m @@ -0,0 +1,321 @@ +#import "GPUImageHistogramFilter.h" + +// Unlike other filters, this one uses a grid of GL_POINTs to sample the incoming image. A custom vertex shader reads the color in the texture at its position +// and outputs a bin position in the final histogram as the vertex position. That point is then written into the image of the histogram using translucent pixels. +// The degree of translucency is controlled by the scalingFactor, which lets you adjust the dynamic range of the histogram. The histogram can only be generated for one +// color channel or luminance value at a time. +// +// This is based on the implementation described here: http://www.shaderwrangler.com/publications/histogram/histogram_cameraready.pdf +// +// Or at least that's how it would work if iOS could read from textures in a vertex shader, which it can't. Therefore, I read the texture data down from the +// incoming frame and process the texture colors as vertices.
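+// +// A minimal usage sketch (sourcePicture here is a hypothetical, already-configured GPUImagePicture): +// +// GPUImageHistogramFilter *histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:kGPUImageHistogramLuminance]; +// histogramFilter.downsamplingFactor = 16; // sample every 16th pixel +// [sourcePicture addTarget:histogramFilter]; +// [sourcePicture processImage]; +// +// In the sampling vertex shaders below, a color byte v in [0, 255] maps to the clip-space position x = -1.0 + v * 0.0078125 (0.0078125 being 2.0/256.0), so each byte value gets its own one-pixel-wide bin in the 256-pixel-wide output, up to rasterization rounding at column edges; additive blending (GL_ONE, GL_ONE) with the 1.0/256.0 scaling factor in the fragment shader then accumulates the per-bin counts.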
+ +NSString *const kGPUImageRedHistogramSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + varying vec3 colorFactor; + + void main() + { + colorFactor = vec3(1.0, 0.0, 0.0); + gl_Position = vec4(-1.0 + (position.x * 0.0078125), 0.0, 0.0, 1.0); + gl_PointSize = 1.0; + } +); + +NSString *const kGPUImageGreenHistogramSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + varying vec3 colorFactor; + + void main() + { + colorFactor = vec3(0.0, 1.0, 0.0); + gl_Position = vec4(-1.0 + (position.y * 0.0078125), 0.0, 0.0, 1.0); + gl_PointSize = 1.0; + } +); + +NSString *const kGPUImageBlueHistogramSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + varying vec3 colorFactor; + + void main() + { + colorFactor = vec3(0.0, 0.0, 1.0); + gl_Position = vec4(-1.0 + (position.z * 0.0078125), 0.0, 0.0, 1.0); + gl_PointSize = 1.0; + } +); + +NSString *const kGPUImageLuminanceHistogramSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + varying vec3 colorFactor; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + float luminance = dot(position.xyz, W); + + colorFactor = vec3(1.0, 1.0, 1.0); + gl_Position = vec4(-1.0 + (luminance * 0.0078125), 0.0, 0.0, 1.0); + gl_PointSize = 1.0; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING +( + const lowp float scalingFactor = 1.0 / 256.0; + + varying lowp vec3 colorFactor; + + void main() + { + gl_FragColor = vec4(colorFactor * scalingFactor , 1.0); + } +); +#else +NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING +( + const float scalingFactor = 1.0 / 256.0; + + varying vec3 colorFactor; + + void main() + { + gl_FragColor = vec4(colorFactor * scalingFactor , 1.0); + } +); +#endif + +@implementation GPUImageHistogramFilter + +@synthesize downsamplingFactor = _downsamplingFactor; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType; +{ + switch (newHistogramType) + { + case kGPUImageHistogramRed: + { + if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString])) + { + return nil; + } + }; break; + case kGPUImageHistogramGreen: + { + if (!(self = [super initWithVertexShaderFromString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString])) + { + return nil; + } + }; break; + case kGPUImageHistogramBlue: + { + if (!(self = [super initWithVertexShaderFromString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString])) + { + return nil; + } + }; break; + case kGPUImageHistogramLuminance: + { + if (!(self = [super initWithVertexShaderFromString:kGPUImageLuminanceHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString])) + { + return nil; + } + }; break; + case kGPUImageHistogramRGB: + { + if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString])) + { + return nil; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] 
programForVertexShaderString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString]; + thirdFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString]; + + if (!secondFilterProgram.initialized) + { + [self initializeSecondaryAttributes]; + + if (![secondFilterProgram link]) + { + NSString *progLog = [secondFilterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [secondFilterProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [secondFilterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + filterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"]; + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + glEnableVertexAttribArray(secondFilterPositionAttribute); + + if (!thirdFilterProgram.initialized) + { + if (![thirdFilterProgram link]) + { + NSString *progLog = [thirdFilterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [thirdFilterProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [thirdFilterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + filterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + thirdFilterPositionAttribute = [thirdFilterProgram attributeIndex:@"position"]; + [GPUImageContext setActiveShaderProgram:thirdFilterProgram]; + glEnableVertexAttribArray(thirdFilterPositionAttribute); + }); + }; break; + } + + histogramType = newHistogramType; + + self.downsamplingFactor = 16; + + return self; +} + +- (void)initializeSecondaryAttributes; +{ + [secondFilterProgram addAttribute:@"position"]; + [thirdFilterProgram addAttribute:@"position"]; +} + +- (void)dealloc; +{ + if (vertexSamplingCoordinates != NULL) + { + free(vertexSamplingCoordinates); + } +} + +#pragma mark - +#pragma mark Rendering + +- (void)generatePointCoordinates; +{ + vertexSamplingCoordinates = calloc(inputTextureSize.width * inputTextureSize.height * 4, sizeof(GLubyte)); +} + +- (CGSize)sizeOfFBO; +{ + return CGSizeMake(256.0, 3.0); +} + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + if (vertexSamplingCoordinates == NULL) + { + [self generatePointCoordinates]; + } + + [self renderToTextureWithVertices:NULL textureCoordinates:NULL sourceTexture:filterSourceTexture]; + + [self informTargetsAboutNewFrameAtTime:frameTime]; +} + +- (CGSize)outputFrameSize; +{ + return [self sizeOfFBO]; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (self.preventRendering) + { + return; + } + + inputTextureSize = newSize; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = kGPUImageNoRotation; +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // we need a normal color texture for this filter + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE,
@"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + + if (self.preventRendering) + { + return; + } + + [GPUImageContext useImageProcessingContext]; + + glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, vertexSamplingCoordinates); + + [self setFilterFBO]; + + [GPUImageContext setActiveShaderProgram:filterProgram]; + + glClearColor(0.0, 0.0, 0.0, 1.0); + glClear(GL_COLOR_BUFFER_BIT); + + glBlendEquation(GL_FUNC_ADD); + glBlendFunc(GL_ONE, GL_ONE); + glEnable(GL_BLEND); + + glVertexAttribPointer(filterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates); + glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor); + + if (histogramType == kGPUImageHistogramRGB) + { + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + + glVertexAttribPointer(secondFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates); + glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor); + + [GPUImageContext setActiveShaderProgram:thirdFilterProgram]; + + glVertexAttribPointer(thirdFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates); + glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor); + } + + glDisable(GL_BLEND); +} + +#pragma mark - +#pragma mark Accessors + +//- (void)setScalingFactor:(CGFloat)newValue; +//{ +// _scalingFactor = newValue; +// +// [GPUImageContext useImageProcessingContext]; +// [filterProgram use]; +// glUniform1f(scalingFactorUniform, _scalingFactor); +//} + +@end diff --git a/GPUImage/Source/GPUImageHistogramGenerator.h b/GPUImage/Source/GPUImageHistogramGenerator.h new file mode 100755 index 0000000..f80c50f --- /dev/null +++ b/GPUImage/Source/GPUImageHistogramGenerator.h @@ -0,0 +1,8 @@ +#import "GPUImageFilter.h" + +@interface GPUImageHistogramGenerator : GPUImageFilter +{ + GLint backgroundColorUniform; +} + +@end diff --git a/GPUImage/Source/GPUImageHistogramGenerator.m b/GPUImage/Source/GPUImageHistogramGenerator.m new file mode 100755 index 0000000..703795d --- /dev/null +++ b/GPUImage/Source/GPUImageHistogramGenerator.m @@ -0,0 +1,87 @@ +#import "GPUImageHistogramGenerator.h" + +NSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + varying vec2 textureCoordinate; + varying float height; + + void main() + { + gl_Position = position; + textureCoordinate = vec2(inputTextureCoordinate.x, 0.5); + height = 1.0 - inputTextureCoordinate.y; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp float height; + + uniform sampler2D inputImageTexture; + uniform lowp vec4 backgroundColor; + + void main() + { + lowp vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb; + lowp vec4 heightTest = vec4(step(height, colorChannels), 1.0); + gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b); + } +); +#else +NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying float height; + + uniform sampler2D 
inputImageTexture; + uniform vec4 backgroundColor; + + void main() + { + vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb; + vec4 heightTest = vec4(step(height, colorChannels), 1.0); + gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b); + } +); +#endif + +@implementation GPUImageHistogramGenerator + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageHistogramGeneratorVertexShaderString fragmentShaderFromString:kGPUImageHistogramGeneratorFragmentShaderString])) + { + return nil; + } + + backgroundColorUniform = [filterProgram uniformIndex:@"backgroundColor"]; + + [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0]; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +{ +// GLfloat backgroundColor[4]; +// backgroundColor[0] = redComponent; +// backgroundColor[1] = greenComponent; +// backgroundColor[2] = blueComponent; +// backgroundColor[3] = alphaComponent; + GPUVector4 backgroundColor = {redComponent, greenComponent, blueComponent, alphaComponent}; + + [self setVec4:backgroundColor forUniform:backgroundColorUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageHoughTransformLineDetector.h b/GPUImage/Source/GPUImageHoughTransformLineDetector.h new file mode 100644 index 0000000..3ab6977 --- /dev/null +++ b/GPUImage/Source/GPUImageHoughTransformLineDetector.h @@ -0,0 +1,49 @@ +#import "GPUImageFilterGroup.h" +#import "GPUImageThresholdEdgeDetectionFilter.h" +#import "GPUImageParallelCoordinateLineTransformFilter.h" +#import "GPUImageThresholdedNonMaximumSuppressionFilter.h" +#import "GPUImageCannyEdgeDetectionFilter.h" + +// This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass, +// then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines +// is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima. +// These local maxima are then converted back into lines in normal space and returned via a callback block. +// +// Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient +// to rasterize on a GPU. +// +// This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology +// and described in their publications: +// +// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7. +// http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf +// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494. 
+// http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf + +//#define DEBUGLINEDETECTION + +@interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup +{ + GPUImageOutput *thresholdEdgeDetectionFilter; + +// GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter; + GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter; + GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter; + + GLfloat *linesArray; + GLubyte *rawImagePixels; +} + +// A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9. +@property(readwrite, nonatomic) CGFloat edgeThreshold; + +// A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.8, matching the value set in -init. +@property(readwrite, nonatomic) CGFloat lineDetectionThreshold; + +// This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame. +@property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat* lineArray, NSUInteger linesDetected, CMTime frameTime); + +// These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform. +@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages; + +@end diff --git a/GPUImage/Source/GPUImageHoughTransformLineDetector.m b/GPUImage/Source/GPUImageHoughTransformLineDetector.m new file mode 100644 index 0000000..4d94d8f --- /dev/null +++ b/GPUImage/Source/GPUImageHoughTransformLineDetector.m @@ -0,0 +1,232 @@ +#import "GPUImageHoughTransformLineDetector.h" + +@interface GPUImageHoughTransformLineDetector() + +- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime; + +@end + +@implementation GPUImageHoughTransformLineDetector + +@synthesize linesDetectedBlock; +@synthesize edgeThreshold; +@synthesize lineDetectionThreshold; +@synthesize intermediateImages = _intermediateImages; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + +#ifdef DEBUGLINEDETECTION + _intermediateImages = [[NSMutableArray alloc] init]; +#endif + + // First pass: do edge detection and threshold that to just have white pixels for edges +// if ([GPUImageContext deviceSupportsFramebufferReads]) +// { + thresholdEdgeDetectionFilter = [[GPUImageThresholdEdgeDetectionFilter alloc] init]; +// thresholdEdgeDetectionFilter = [[GPUImageSobelEdgeDetectionFilter alloc] init]; + [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:0.4]; +// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:0.25]; + [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:1.0]; +// thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init]; +// } +// else +// { +// thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init]; +// } + [self addFilter:thresholdEdgeDetectionFilter]; + +#ifdef DEBUGLINEDETECTION + __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages; + __unsafe_unretained GPUImageOutput *weakFilter = thresholdEdgeDetectionFilter; + [thresholdEdgeDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ +
[weakIntermediateImages removeAllObjects]; + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + }]; +#endif + + // Second pass: extract the white points and draw representative lines in parallel coordinate space + parallelCoordinateLineTransformFilter = [[GPUImageParallelCoordinateLineTransformFilter alloc] init]; + [self addFilter:parallelCoordinateLineTransformFilter]; + +#ifdef DEBUGLINEDETECTION + weakFilter = parallelCoordinateLineTransformFilter; + [parallelCoordinateLineTransformFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + }]; +#endif + + // Third pass: apply non-maximum suppression + if ([GPUImageContext deviceSupportsFramebufferReads]) + { + nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:YES]; + } + else + { + nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:NO]; + } + [self addFilter:nonMaximumSuppressionFilter]; + + __unsafe_unretained GPUImageHoughTransformLineDetector *weakSelf = self; +#ifdef DEBUGLINEDETECTION + weakFilter = nonMaximumSuppressionFilter; + [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){ + + UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput]; + [weakIntermediateImages addObject:intermediateImage]; + + [weakSelf extractLineParametersFromImageAtFrameTime:frameTime]; + }]; +#else + [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) { + [weakSelf extractLineParametersFromImageAtFrameTime:frameTime]; + }]; +#endif + + [thresholdEdgeDetectionFilter addTarget:parallelCoordinateLineTransformFilter]; + [parallelCoordinateLineTransformFilter addTarget:nonMaximumSuppressionFilter]; + + self.initialFilters = [NSArray arrayWithObjects:thresholdEdgeDetectionFilter, nil]; + // self.terminalFilter = colorPackingFilter; + self.terminalFilter = nonMaximumSuppressionFilter; + +// self.edgeThreshold = 0.95; + self.lineDetectionThreshold = 0.8; + + return self; +} + +- (void)dealloc; +{ + free(rawImagePixels); + free(linesArray); +} + +#pragma mark - +#pragma mark Line extraction + +- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime; +{ + // we need a normal color texture for this filter + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + + NSUInteger numberOfLines = 0; + CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize; + + unsigned int imageByteSize = imageSize.width * imageSize.height * 4; + + if (rawImagePixels == NULL) + { + rawImagePixels = (GLubyte *)malloc(imageByteSize); + linesArray = calloc(1024 * 2, sizeof(GLfloat)); + } + + glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + +// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + unsigned int imageWidth = imageSize.width * 4; + + unsigned int currentByte = 0; + unsigned int cornerStorageIndex = 0; + while (currentByte < imageByteSize) + { + GLubyte colorByte = rawImagePixels[currentByte];
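+        // A worked sketch of the parallel-coordinate inversion performed by the
+        // branch below, restating the code's own m/b comments (the derivation is
+        // editorial, not part of the original source). In PC space a line
+        // y = mx + b becomes a point (u, v); with the axis spacing d normalized
+        // to 1, the S ("straight") half gives u = d/(1 - m), so m = 1 - d/u and
+        // b = d*v/u, while the T ("twisted") half gives u = -d/(1 + m), so
+        // m = -1 - d/u and b = d*v/u. Points with u near 0 correspond to nearly
+        // vertical lines, which is why they are stamped with a sentinel slope
+        // of 100000.0 instead of an unbounded m.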
+ + if (colorByte > 0) + { + unsigned int xCoordinate = currentByte % imageWidth; + unsigned int yCoordinate = currentByte / imageWidth; + + CGFloat normalizedXCoordinate = -1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / imageSize.width; + CGFloat normalizedYCoordinate = -1.0 + 2.0 * (CGFloat)(yCoordinate) / imageSize.height; + + if (normalizedXCoordinate < 0.0) + { + // T space + // m = -1 - d/u + // b = d * v/u + if (normalizedXCoordinate > -0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y + { + linesArray[cornerStorageIndex++] = 100000.0; + linesArray[cornerStorageIndex++] = normalizedYCoordinate; + } + else + { + linesArray[cornerStorageIndex++] = -1.0 - 1.0 / normalizedXCoordinate; + linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate; + } + } + else + { + // S space + // m = 1 - d/u + // b = d * v/u + if (normalizedXCoordinate < 0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y + { + linesArray[cornerStorageIndex++] = 100000.0; + linesArray[cornerStorageIndex++] = normalizedYCoordinate; + } + else + { + linesArray[cornerStorageIndex++] = 1.0 - 1.0 / normalizedXCoordinate; + linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate; + } + } + + numberOfLines++; + + numberOfLines = MIN(numberOfLines, 1023); + cornerStorageIndex = MIN(cornerStorageIndex, 2040); + } + currentByte +=4; + } + +// CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); +// NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime); + + if (linesDetectedBlock != NULL) + { + linesDetectedBlock(linesArray, numberOfLines, frameTime); + } +} + +- (BOOL)wantsMonochromeInput; +{ + return YES; +} + +#pragma mark - +#pragma mark Accessors + +/* +- (void)setEdgeThreshold:(CGFloat)newValue; +{ + thresholdEdgeDetectionFilter.threshold = newValue; +} + +- (CGFloat)edgeThreshold; +{ + return thresholdEdgeDetectionFilter.threshold; +} + */ + +- (void)setLineDetectionThreshold:(CGFloat)newValue; +{ + nonMaximumSuppressionFilter.threshold = newValue; +} + +- (CGFloat)lineDetectionThreshold; +{ + return nonMaximumSuppressionFilter.threshold; +} + +@end diff --git a/GPUImage/Source/GPUImageHueBlendFilter.h b/GPUImage/Source/GPUImageHueBlendFilter.h new file mode 100644 index 0000000..4399ffc --- /dev/null +++ b/GPUImage/Source/GPUImageHueBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageHueBlendFilter.m b/GPUImage/Source/GPUImageHueBlendFilter.m new file mode 100644 index 0000000..f9dfbbb --- /dev/null +++ b/GPUImage/Source/GPUImageHueBlendFilter.m @@ -0,0 +1,212 @@ +#import "GPUImageHueBlendFilter.h" + +/** + * Hue blend mode based upon pseudo code from the PDF specification. 
+ */ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + highp float lum(lowp vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + lowp vec3 clipcolor(lowp vec3 c) { + highp float l = lum(c); + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + lowp vec3 setlum(lowp vec3 c, highp float l) { + highp float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + highp float sat(lowp vec3 c) { + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + return x - n; + } + + lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) { + return ((cmid - cmin) * s) / (cmax - cmin); + } + + lowp vec3 setsat(lowp vec3 c, highp float s) { + if (c.r > c.g) { + if (c.r > c.b) { + if (c.g > c.b) { + /* g is mid, b is min */ + c.g = mid(c.b, c.g, c.r, s); + c.b = 0.0; + } else { + /* b is mid, g is min */ + c.b = mid(c.g, c.b, c.r, s); + c.g = 0.0; + } + c.r = s; + } else { + /* b is max, r is mid, g is min: zero the min channel, not the mid one */ + c.r = mid(c.g, c.r, c.b, s); + c.b = s; + c.g = 0.0; + } + } else if (c.r > c.b) { + /* g is max, r is mid, b is min */ + c.r = mid(c.b, c.r, c.g, s); + c.g = s; + c.b = 0.0; + } else if (c.g > c.b) { + /* g is max, b is mid, r is min */ + c.b = mid(c.r, c.b, c.g, s); + c.g = s; + c.r = 0.0; + } else if (c.b > c.g) { + /* b is max, g is mid, r is min */ + c.g = mid(c.r, c.g, c.b, s); + c.b = s; + c.r = 0.0; + } else { + c = vec3(0.0); + } + return c; + } + + void main() + { + highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#else +NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + float lum(vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + vec3 clipcolor(vec3 c) { + float l = lum(c); + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + vec3 setlum(vec3 c, float l) { + float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + float sat(vec3 c) { + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + return x - n; + } + + float mid(float cmin, float cmid, float cmax, float s) { + return ((cmid - cmin) * s) / (cmax - cmin); + } + + vec3 setsat(vec3 c, float s) { + if (c.r > c.g) { + if (c.r > c.b) { + if (c.g > c.b) { + /* g is
mid, b is min */ + c.g = mid(c.b, c.g, c.r, s); + c.b = 0.0; + } else { + /* b is mid, g is min */ + c.b = mid(c.g, c.b, c.r, s); + c.g = 0.0; + } + c.r = s; + } else { + /* b is max, r is mid, g is min: zero the min channel, not the mid one */ + c.r = mid(c.g, c.r, c.b, s); + c.b = s; + c.g = 0.0; + } + } else if (c.r > c.b) { + /* g is max, r is mid, b is min */ + c.r = mid(c.b, c.r, c.g, s); + c.g = s; + c.b = 0.0; + } else if (c.g > c.b) { + /* g is max, b is mid, r is min */ + c.b = mid(c.r, c.b, c.g, s); + c.g = s; + c.r = 0.0; + } else if (c.b > c.g) { + /* b is max, g is mid, r is min */ + c.g = mid(c.r, c.g, c.b, s); + c.b = s; + c.r = 0.0; + } else { + c = vec3(0.0); + } + return c; + } + + void main() + { + vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#endif + +@implementation GPUImageHueBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageHueBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageHueFilter.h b/GPUImage/Source/GPUImageHueFilter.h new file mode 100644 index 0000000..eef2465 --- /dev/null +++ b/GPUImage/Source/GPUImageHueFilter.h @@ -0,0 +1,11 @@ + +#import "GPUImageFilter.h" + +@interface GPUImageHueFilter : GPUImageFilter +{ + GLint hueAdjustUniform; + +} +@property (nonatomic, readwrite) CGFloat hue; + +@end diff --git a/GPUImage/Source/GPUImageHueFilter.m b/GPUImage/Source/GPUImageHueFilter.m new file mode 100644 index 0000000..5b42c86 --- /dev/null +++ b/GPUImage/Source/GPUImageHueFilter.m @@ -0,0 +1,123 @@ + +#import "GPUImageHueFilter.h" + +// Adapted from http://stackoverflow.com/questions/9234724/how-to-change-hue-of-a-texture-with-glsl - see for code and discussion +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING +( + precision highp float; + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform mediump float hueAdjust; + const highp vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0); + const highp vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0); + const highp vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0); + + const highp vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0); + const highp vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0); + const highp vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0); + + void main () + { + // Sample the input pixel + highp vec4 color = texture2D(inputImageTexture, textureCoordinate); + + // Convert to YIQ + highp float YPrime = dot (color, kRGBToYPrime); + highp float I = dot (color, kRGBToI); + highp float Q = dot (color, kRGBToQ); + + // Calculate the hue and chroma + highp float hue = atan (Q, I); + highp float chroma = sqrt (I * I + Q * Q); + + // Make the user's adjustments + hue += (-hueAdjust); //why negative rotation?
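+      // What follows is a 2-D rotation of the chroma vector: (hue, chroma) are
+      // the polar coordinates of (I, Q), so shifting the angle here and then
+      // reconstructing I = chroma*cos(hue), Q = chroma*sin(hue) rotates (I, Q)
+      // by the adjustment while leaving Y' (luma) untouched.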
+ + // Convert back to YIQ + Q = chroma * sin (hue); + I = chroma * cos (hue); + + // Convert back to RGB + highp vec4 yIQ = vec4 (YPrime, I, Q, 0.0); + color.r = dot (yIQ, kYIQToR); + color.g = dot (yIQ, kYIQToG); + color.b = dot (yIQ, kYIQToB); + + // Save the result + gl_FragColor = color; + } +); +#else +NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float hueAdjust; + const vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0); + const vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0); + const vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0); + + const vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0); + const vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0); + const vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0); + + void main () + { + // Sample the input pixel + vec4 color = texture2D(inputImageTexture, textureCoordinate); + + // Convert to YIQ + float YPrime = dot (color, kRGBToYPrime); + float I = dot (color, kRGBToI); + float Q = dot (color, kRGBToQ); + + // Calculate the hue and chroma + float hue = atan (Q, I); + float chroma = sqrt (I * I + Q * Q); + + // Make the user's adjustments + hue += (-hueAdjust); //why negative rotation? + + // Convert back to YIQ + Q = chroma * sin (hue); + I = chroma * cos (hue); + + // Convert back to RGB + vec4 yIQ = vec4 (YPrime, I, Q, 0.0); + color.r = dot (yIQ, kYIQToR); + color.g = dot (yIQ, kYIQToG); + color.b = dot (yIQ, kYIQToB); + + // Save the result + gl_FragColor = color; + } +); +#endif + +@implementation GPUImageHueFilter +@synthesize hue; + +- (id)init +{ + if(! (self = [super initWithFragmentShaderFromString:kGPUImageHueFragmentShaderString]) ) + { + return nil; + } + + hueAdjustUniform = [filterProgram uniformIndex:@"hueAdjust"]; + self.hue = 90; + + return self; +} + +- (void)setHue:(CGFloat)newHue +{ + // Convert degrees to radians for hue rotation + hue = fmodf(newHue, 360.0) * M_PI/180; + [self setFloat:hue forUniform:hueAdjustUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageJFAVoronoiFilter.h b/GPUImage/Source/GPUImageJFAVoronoiFilter.h new file mode 100644 index 0000000..4c50cc3 --- /dev/null +++ b/GPUImage/Source/GPUImageJFAVoronoiFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilter.h" + +@interface GPUImageJFAVoronoiFilter : GPUImageFilter +{ + GLuint secondFilterOutputTexture; + GLuint secondFilterFramebuffer; + + + GLint sampleStepUniform; + GLint sizeUniform; + NSUInteger numPasses; + +} + +@property (nonatomic, readwrite) CGSize sizeInPixels; + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageJFAVoronoiFilter.m b/GPUImage/Source/GPUImageJFAVoronoiFilter.m new file mode 100644 index 0000000..46efee0 --- /dev/null +++ b/GPUImage/Source/GPUImageJFAVoronoiFilter.m @@ -0,0 +1,592 @@ +// adapted from unitzeroone - http://unitzeroone.com/labs/jfavoronoi/ + +#import "GPUImageJFAVoronoiFilter.h" + +// The shaders are mostly taken from UnitZeroOne's WebGL example here: +// http://unitzeroone.com/blog/2011/03/22/jump-flood-voronoi-for-webgl/ + +NSString *const kGPUImageJFAVoronoiVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform float sampleStep; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 
bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 widthStep = vec2(sampleStep, 0.0); + vec2 heightStep = vec2(0.0, sampleStep); + vec2 widthHeightStep = vec2(sampleStep); + vec2 widthNegativeHeightStep = vec2(sampleStep, -sampleStep); + + textureCoordinate = inputTextureCoordinate.xy; + leftTextureCoordinate = inputTextureCoordinate.xy - widthStep; + rightTextureCoordinate = inputTextureCoordinate.xy + widthStep; + + topTextureCoordinate = inputTextureCoordinate.xy - heightStep; + topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep; + topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep; + + bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep; + bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep; + bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep; + } + ); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING +( + + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform vec2 size; + //varying vec2 textureCoordinate; + //uniform float sampleStep; + + vec2 getCoordFromColor(vec4 color) +{ + float z = color.z * 256.0; + float yoff = floor(z / 8.0); + float xoff = mod(z, 8.0); + float x = color.x*256.0 + xoff*256.0; + float y = color.y*256.0 + yoff*256.0; + return vec2(x,y) / size; +} + + void main(void) { + + vec2 sub; + vec4 dst; + vec4 local = texture2D(inputImageTexture, textureCoordinate); + vec4 sam; + float l; + float smallestDist; + if(local.a == 0.0){ + + smallestDist = dot(1.0,1.0); + }else{ + sub = getCoordFromColor(local)-textureCoordinate; + smallestDist = dot(sub,sub); + } + dst = local; + + + sam = texture2D(inputImageTexture, topRightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, topTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, topLeftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomRightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + 
} + + sam = texture2D(inputImageTexture, leftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, rightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + gl_FragColor = dst; + } +); +#else +NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform vec2 size; + //varying vec2 textureCoordinate; + //uniform float sampleStep; + + vec2 getCoordFromColor(vec4 color) +{ + float z = color.z * 256.0; + float yoff = floor(z / 8.0); + float xoff = mod(z, 8.0); + float x = color.x*256.0 + xoff*256.0; + float y = color.y*256.0 + yoff*256.0; + return vec2(x,y) / size; +} + + void main(void) { + + vec2 sub; + vec4 dst; + vec4 local = texture2D(inputImageTexture, textureCoordinate); + vec4 sam; + float l; + float smallestDist; + if(local.a == 0.0){ + + smallestDist = dot(1.0,1.0); + }else{ + sub = getCoordFromColor(local)-textureCoordinate; + smallestDist = dot(sub,sub); + } + dst = local; + + + sam = texture2D(inputImageTexture, topRightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, topTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, topLeftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomRightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, leftTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + + sam = texture2D(inputImageTexture, rightTextureCoordinate); + if(sam.a == 1.0){ + sub = (getCoordFromColor(sam)-textureCoordinate); + l = dot(sub,sub); + if(l < smallestDist){ + smallestDist = l; + dst = sam; + } + } + gl_FragColor = dst; + } +); +#endif + +@interface GPUImageJFAVoronoiFilter() { + int currentPass; +} + + +@end + +@implementation GPUImageJFAVoronoiFilter + 
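+// An editorial sketch of the jump flood schedule driven by
+// renderToTextureWithVertices: below (it assumes a power-of-two seed texture,
+// which setSizeInPixels: enforces). For an N x N input with N = 2^numPasses,
+// the shader runs repeatedly while the normalized sampleStep uniform halves
+// each pass, so every fragment inspects its eight neighbors at shrinking
+// offsets and keeps the nearest seed found so far:
+//
+//     N = 256  =>  numPasses = 8
+//     pass:        1      2      3     ...    8        9
+//     sampleStep:  1/2    1/4    1/8   ...    1/256    1/512
+//
+// Output ping-pongs between the two framebuffers (outputTexture and
+// secondFilterOutputTexture); the currentPass % 2 test in setOutputFBO picks
+// the destination for each pass.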
+@synthesize sizeInPixels = _sizeInPixels; + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageJFAVoronoiVertexShaderString fragmentShaderFromString:kGPUImageJFAVoronoiFragmentShaderString])) + { + + NSLog(@"nil returned"); + return nil; + + } + + sampleStepUniform = [filterProgram uniformIndex:@"sampleStep"]; + sizeUniform = [filterProgram uniformIndex:@"size"]; + //[self disableSecondFrameCheck]; + + return self; +} + +-(void)setSizeInPixels:(CGSize)sizeInPixels { + _sizeInPixels = sizeInPixels; + + //validate that it's a power of 2 + + float width = log2(sizeInPixels.width); + float height = log2(sizeInPixels.height); + + if (width != height) { + NSLog(@"Voronoi point texture must be square"); + return; + } + if (width != floor(width) || height != floor(height)) { + NSLog(@"Voronoi point texture must be a power of 2. Texture size: %f, %f", sizeInPixels.width, sizeInPixels.height); + return; + } + glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height); +} + +#pragma mark - +#pragma mark Managing the display FBOs + + +- (void)initializeOutputTextureIfNeeded; +{ + [GPUImageContext useImageProcessingContext]; + + glActiveTexture(GL_TEXTURE2); + glGenTextures(1, &outputTexture); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + // This is necessary for non-power-of-two textures + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + glBindTexture(GL_TEXTURE_2D, 0); + + glGenTextures(1, &secondFilterOutputTexture); + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + glBindTexture(GL_TEXTURE_2D, 0); +} + +-(NSUInteger)nextPowerOfTwo:(CGPoint)input { + NSUInteger val; + if (input.x > input.y) { + val = (NSUInteger)input.x; + } else { + val = (NSUInteger)input.y; + } + + val--; + val = (val >> 1) | val; + val = (val >> 2) | val; + val = (val >> 4) | val; + val = (val >> 8) | val; + val = (val >> 16) | val; + val++; + return val; +} + +- (void)createFilterFBOofSize:(CGSize)currentFBOSize +{ + + [self prepareForImageCapture]; + numPasses = (int)log2([self nextPowerOfTwo:CGPointMake(currentFBOSize.width, currentFBOSize.height)]); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + //preparedToCaptureImage = NO; + [super createFilterFBOofSize:currentFBOSize]; + //preparedToCaptureImage = YES; + + } + else + { + [super createFilterFBOofSize:currentFBOSize]; + + } + + glGenFramebuffers(1, &secondFilterFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, 
(__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + CFDictionaryRef empty; // empty value for attr value. + CFMutableDictionaryRef attrs; + empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary + attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); + CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty); + + err = CVPixelBufferCreate(kCFAllocatorDefault, (int)currentFBOSize.width, (int)currentFBOSize.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget); + if (err) + { + NSLog(@"FBO size: %f, %f", currentFBOSize.width, currentFBOSize.height); + NSAssert(NO, @"Error at CVPixelBufferCreate %d", err); + } + + err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + filterTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + self.outputTextureOptions.internalFormat, // opengl format + (int)currentFBOSize.width, + (int)currentFBOSize.height, + self.outputTextureOptions.format, // native iOS format + self.outputTextureOptions.type, + 0, + &renderTexture); + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + CFRelease(attrs); + CFRelease(empty); + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); + secondFilterOutputTexture = CVOpenGLESTextureGetName(renderTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); + + [self notifyTargetsAboutNewOutputTexture]; +#endif + } + else + { + [self initializeOutputTextureIfNeeded]; + + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + (int)currentFBOSize.width, + (int)currentFBOSize.height, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, + 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, secondFilterOutputTexture, 0); + + [self notifyTargetsAboutNewOutputTexture]; + } + + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + + glBindTexture(GL_TEXTURE_2D, 0); +} + + +//we may not need these +- (void)setSecondFilterFBO; +{ + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + // + // CGSize currentFBOSize = [self sizeOfFBO]; + // glViewport(0, 0, (int)currentFBOSize.width,
(int)currentFBOSize.height); +} + +- (void)setOutputFBO; +{ + if (currentPass % 2 == 1) { + [self setSecondFilterFBO]; + } else { + [self setFilterFBO]; + } + +} + + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // Run the first stage of the two-pass filter + [GPUImageContext setActiveShaderProgram:filterProgram]; + currentPass = 0; + [self setFilterFBO]; + + glActiveTexture(GL_TEXTURE2); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + glUniform1f(sampleStepUniform, 0.5); + + glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height); + + glBindTexture(GL_TEXTURE_2D, sourceTexture); + + glUniform1i(filterInputTextureUniform, 2); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + for (int pass = 1; pass <= numPasses + 1; pass++) { + currentPass = pass; + [self setOutputFBO]; + + //glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + if (pass % 2 == 0) { + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + } else { + glBindTexture(GL_TEXTURE_2D, outputTexture); + } + glUniform1i(filterInputTextureUniform, 2); + + // Cast before subtracting: numPasses is an NSUInteger, so numPasses - pass would wrap around on the final pass + float step = pow(2.0, (float)((int)numPasses - pass)) / pow(2.0, (float)numPasses); + glUniform1f(sampleStepUniform, step); + glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + } +} + +@end diff --git a/GPUImage/Source/GPUImageKuwaharaFilter.h b/GPUImage/Source/GPUImageKuwaharaFilter.h new file mode 100755 index 0000000..ef4ff47 --- /dev/null +++ b/GPUImage/Source/GPUImageKuwaharaFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images. + */ +@interface GPUImageKuwaharaFilter : GPUImageFilter +{ + GLint radiusUniform; +} + +/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter. +@property(readwrite, nonatomic) GLuint radius; + +@end diff --git a/GPUImage/Source/GPUImageKuwaharaFilter.m b/GPUImage/Source/GPUImageKuwaharaFilter.m new file mode 100755 index 0000000..5c8e396 --- /dev/null +++ b/GPUImage/Source/GPUImageKuwaharaFilter.m @@ -0,0 +1,223 @@ +#import "GPUImageKuwaharaFilter.h" + +// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010).
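+//
+// How the shader below works, in brief: around each pixel it gathers four
+// overlapping (radius+1) x (radius+1) windows (upper-left, upper-right,
+// lower-right, lower-left), accumulating per-channel sums m and sums of
+// squares s for each window. Each window's variance then falls out of the
+// usual identity Var[c] = E[c^2] - E[c]^2, which is exactly what
+// s / n - m * m computes once both accumulators are divided by the sample
+// count n = (radius + 1)^2. The output color is the mean of whichever window
+// has the smallest summed RGB variance, which flattens texture inside regions
+// while preserving edges - the "brush stroke" look.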
+// +// Original header: +// +// Anisotropic Kuwahara Filtering on the GPU +// by Jan Eric Kyprianidis + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + uniform int radius; + + precision highp float; + + const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0); + + void main (void) + { + vec2 uv = textureCoordinate; + float n = float((radius + 1) * (radius + 1)); + int i; int j; + vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0); + vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0); + vec3 c; + + for (j = -radius; j <= 0; ++j) { + for (i = -radius; i <= 0; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m0 += c; + s0 += c * c; + } + } + + for (j = -radius; j <= 0; ++j) { + for (i = 0; i <= radius; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m1 += c; + s1 += c * c; + } + } + + for (j = 0; j <= radius; ++j) { + for (i = 0; i <= radius; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m2 += c; + s2 += c * c; + } + } + + for (j = 0; j <= radius; ++j) { + for (i = -radius; i <= 0; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m3 += c; + s3 += c * c; + } + } + + + float min_sigma2 = 1e+2; + m0 /= n; + s0 = abs(s0 / n - m0 * m0); + + float sigma2 = s0.r + s0.g + s0.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m0, 1.0); + } + + m1 /= n; + s1 = abs(s1 / n - m1 * m1); + + sigma2 = s1.r + s1.g + s1.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m1, 1.0); + } + + m2 /= n; + s2 = abs(s2 / n - m2 * m2); + + sigma2 = s2.r + s2.g + s2.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m2, 1.0); + } + + m3 /= n; + s3 = abs(s3 / n - m3 * m3); + + sigma2 = s3.r + s3.g + s3.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m3, 1.0); + } + } +); +#else +NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + uniform int radius; + + const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0); + + void main (void) + { + vec2 uv = textureCoordinate; + float n = float((radius + 1) * (radius + 1)); + int i; int j; + vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0); + vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0); + vec3 c; + + for (j = -radius; j <= 0; ++j) { + for (i = -radius; i <= 0; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m0 += c; + s0 += c * c; + } + } + + for (j = -radius; j <= 0; ++j) { + for (i = 0; i <= radius; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m1 += c; + s1 += c * c; + } + } + + for (j = 0; j <= radius; ++j) { + for (i = 0; i <= radius; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m2 += c; + s2 += c * c; + } + } + + for (j = 0; j <= radius; ++j) { + for (i = -radius; i <= 0; ++i) { + c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb; + m3 += c; + s3 += c * c; + } + } + + + float min_sigma2 = 1e+2; + m0 /= n; + s0 = abs(s0 / n - m0 * m0); + + float sigma2 = s0.r + s0.g + s0.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m0, 1.0); + } + + m1 /= n; + s1 = 
abs(s1 / n - m1 * m1); + + sigma2 = s1.r + s1.g + s1.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m1, 1.0); + } + + m2 /= n; + s2 = abs(s2 / n - m2 * m2); + + sigma2 = s2.r + s2.g + s2.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m2, 1.0); + } + + m3 /= n; + s3 = abs(s3 / n - m3 * m3); + + sigma2 = s3.r + s3.g + s3.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m3, 1.0); + } + } +); +#endif + +@implementation GPUImageKuwaharaFilter + +@synthesize radius = _radius; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaFragmentShaderString])) + { + return nil; + } + + radiusUniform = [filterProgram uniformIndex:@"radius"]; + + self.radius = 3; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setRadius:(GLuint)newValue; +{ + _radius = newValue; + + [self setInteger:_radius forUniform:radiusUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageKuwaharaRadius3Filter.h b/GPUImage/Source/GPUImageKuwaharaRadius3Filter.h new file mode 100644 index 0000000..c4591b8 --- /dev/null +++ b/GPUImage/Source/GPUImageKuwaharaRadius3Filter.h @@ -0,0 +1,8 @@ +// +// GPUImageKuwaharaRadius3Filter.h + +#import "GPUImageFilter.h" + +@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter + +@end diff --git a/GPUImage/Source/GPUImageKuwaharaRadius3Filter.m b/GPUImage/Source/GPUImageKuwaharaRadius3Filter.m new file mode 100644 index 0000000..98b092c --- /dev/null +++ b/GPUImage/Source/GPUImageKuwaharaRadius3Filter.m @@ -0,0 +1,547 @@ +#import "GPUImageKuwaharaRadius3Filter.h" + +// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010). 
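+//
+// This is the same four-window Kuwahara scheme as GPUImageKuwaharaFilter, but
+// with the radius hard-coded to 3 and the sampling loops fully unrolled into
+// straight-line texture reads, avoiding dynamic loop bounds in the fragment
+// shader. The per-window sample count is n = (3 + 1)^2 = 16, matching the
+// float n = float(16) constant below; taps on the shared row and column
+// update two (or, at the center tap, all four) of the m/s accumulator pairs
+// at once.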
+// +// Original header: +// +// Anisotropic Kuwahara Filtering on the GPU +// by Jan Eric Kyprianidis + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + + precision highp float; + + const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0); + + void main (void) + { + vec2 uv = textureCoordinate; + float n = float(16); // radius is assumed to be 3 + vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0); + vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0); + vec3 c; + vec3 cSq; + + c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + c = texture2D(inputImageTexture, uv + 
vec2(0,2) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + float min_sigma2 = 1e+2; + m0 /= n; + s0 = abs(s0 / n - m0 * m0); + + float sigma2 = s0.r + s0.g + s0.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m0, 1.0); + } + + m1 /= n; + s1 = abs(s1 / n - m1 * m1); + + sigma2 = s1.r + s1.g + s1.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m1, 1.0); + } + + m2 /= n; + s2 = abs(s2 / n - m2 * m2); + + sigma2 = s2.r + s2.g + s2.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m2, 1.0); + } + + m3 /= n; + s3 = abs(s3 / n - m3 * m3); + + sigma2 = s3.r + s3.g + s3.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m3, 1.0); + } + } +); +#else +NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + + const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0); + + void main (void) + { + vec2 uv = textureCoordinate; + float n = float(16); // radius is assumed to be 3 + vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0); + vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0); + vec3 c; + vec3 cSq; + + c = 
texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb; + m0 += c; + s0 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m3 += c; + s3 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb; + cSq = c * c; + m0 += c; + s0 += cSq; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb; + m1 += c; + s1 += c * c; + c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb; + m1 += c; + s1 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb; + cSq = c * c; + m1 += c; + s1 += cSq; + m2 += c; + s2 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb; + m2 += c; 
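+ // (m0..m3 accumulate the per-sector color sums and s0..s3 the sums of squares;
+ // the variance test below forms each sector's variance as s/n - (m/n)^2 and
+ // outputs the mean of the lowest-variance sector.)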
+ s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb; + m2 += c; + s2 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb; + cSq = c * c; + m2 += c; + s2 += cSq; + m3 += c; + s3 += cSq; + + c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb; + m3 += c; + s3 += c * c; + c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb; + m3 += c; + s3 += c * c; + + float min_sigma2 = 1e+2; + m0 /= n; + s0 = abs(s0 / n - m0 * m0); + + float sigma2 = s0.r + s0.g + s0.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m0, 1.0); + } + + m1 /= n; + s1 = abs(s1 / n - m1 * m1); + + sigma2 = s1.r + s1.g + s1.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m1, 1.0); + } + + m2 /= n; + s2 = abs(s2 / n - m2 * m2); + + sigma2 = s2.r + s2.g + s2.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m2, 1.0); + } + + m3 /= n; + s3 = abs(s3 / n - m3 * m3); + + sigma2 = s3.r + s3.g + s3.b; + if (sigma2 < min_sigma2) { + min_sigma2 = sigma2; + gl_FragColor = vec4(m3, 1.0); + } + } +); +#endif + +@implementation GPUImageKuwaharaRadius3Filter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaRadius3FragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageLanczosResamplingFilter.h b/GPUImage/Source/GPUImageLanczosResamplingFilter.h new file mode 100644 index 0000000..5d7409f --- /dev/null +++ b/GPUImage/Source/GPUImageLanczosResamplingFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter + +@property(readwrite, nonatomic) CGSize originalImageSize; + +@end diff --git a/GPUImage/Source/GPUImageLanczosResamplingFilter.m b/GPUImage/Source/GPUImageLanczosResamplingFilter.m new file mode 100644 index 0000000..54483ef --- /dev/null +++ b/GPUImage/Source/GPUImageLanczosResamplingFilter.m @@ -0,0 +1,202 @@ +#import "GPUImageLanczosResamplingFilter.h" + +NSString *const kGPUImageLanczosVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec2 inputTextureCoordinate; + + uniform float texelWidthOffset; + uniform float texelHeightOffset; + + varying vec2 
centerTextureCoordinate; + varying vec2 oneStepLeftTextureCoordinate; + varying vec2 twoStepsLeftTextureCoordinate; + varying vec2 threeStepsLeftTextureCoordinate; + varying vec2 fourStepsLeftTextureCoordinate; + varying vec2 oneStepRightTextureCoordinate; + varying vec2 twoStepsRightTextureCoordinate; + varying vec2 threeStepsRightTextureCoordinate; + varying vec2 fourStepsRightTextureCoordinate; + + void main() + { + gl_Position = position; + + vec2 firstOffset = vec2(texelWidthOffset, texelHeightOffset); + vec2 secondOffset = vec2(2.0 * texelWidthOffset, 2.0 * texelHeightOffset); + vec2 thirdOffset = vec2(3.0 * texelWidthOffset, 3.0 * texelHeightOffset); + vec2 fourthOffset = vec2(4.0 * texelWidthOffset, 4.0 * texelHeightOffset); + + centerTextureCoordinate = inputTextureCoordinate; + oneStepLeftTextureCoordinate = inputTextureCoordinate - firstOffset; + twoStepsLeftTextureCoordinate = inputTextureCoordinate - secondOffset; + threeStepsLeftTextureCoordinate = inputTextureCoordinate - thirdOffset; + fourStepsLeftTextureCoordinate = inputTextureCoordinate - fourthOffset; + oneStepRightTextureCoordinate = inputTextureCoordinate + firstOffset; + twoStepsRightTextureCoordinate = inputTextureCoordinate + secondOffset; + threeStepsRightTextureCoordinate = inputTextureCoordinate + thirdOffset; + fourStepsRightTextureCoordinate = inputTextureCoordinate + fourthOffset; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepLeftTextureCoordinate; + varying vec2 twoStepsLeftTextureCoordinate; + varying vec2 threeStepsLeftTextureCoordinate; + varying vec2 fourStepsLeftTextureCoordinate; + varying vec2 oneStepRightTextureCoordinate; + varying vec2 twoStepsRightTextureCoordinate; + varying vec2 threeStepsRightTextureCoordinate; + varying vec2 fourStepsRightTextureCoordinate; + + // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2) + // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5 + + void main() + { + lowp vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026; + + fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667; + fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667; + + fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074; + fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074; + + fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612; + fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612; + + fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143; + fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143; + + gl_FragColor = fragmentColor; + } +); +#else +NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepLeftTextureCoordinate; + varying vec2 twoStepsLeftTextureCoordinate; + varying vec2 threeStepsLeftTextureCoordinate; + varying vec2 fourStepsLeftTextureCoordinate; + varying vec2 oneStepRightTextureCoordinate; + varying vec2 twoStepsRightTextureCoordinate; + varying vec2 threeStepsRightTextureCoordinate; + 
varying vec2 fourStepsRightTextureCoordinate; + + // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2) + // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5 + + void main() + { + vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026; + + fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667; + fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667; + + fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074; + fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074; + + fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612; + fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612; + + fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143; + fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143; + + gl_FragColor = fragmentColor; + } +); +#endif + +@implementation GPUImageLanczosResamplingFilter + +@synthesize originalImageSize = _originalImageSize; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageLanczosVertexShaderString firstStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString secondStageVertexShaderFromString:kGPUImageLanczosVertexShaderString secondStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString])) + { + return nil; + } + + return self; +} + +// Base the texture sampling offset on the input image, not the final size +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + self.originalImageSize = newSize; + [super setInputSize:newSize atIndex:textureIndex]; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + // The first pass through the framebuffer may rotate the inbound image, so we need to account for that by changing the kernel ordering for that pass + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + verticalPassTexelWidthOffset = 1.0 / _originalImageSize.height; + verticalPassTexelHeightOffset = 0.0; + } + else + { + verticalPassTexelWidthOffset = 0.0; + verticalPassTexelHeightOffset = 1.0 / _originalImageSize.height; + } + + horizontalPassTexelWidthOffset = 1.0 / _originalImageSize.width; + horizontalPassTexelHeightOffset = 0.0; + }); +} + +// The first pass (vertical) of the resampling needs to be shrunk in only one dimension, so that the remaining shrinkage can be performed in the horizontal pass +- (void)setFilterFBO; +{ + CGSize currentFBOSize = [self sizeOfFBO]; + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + currentFBOSize.height = self.originalImageSize.height; + } + else + { + currentFBOSize.width = self.originalImageSize.width; + } + + if (!filterFramebuffer) + { + [super createFilterFBOofSize:currentFBOSize]; + [self setupFilterForSize:currentFBOSize]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer); + + glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height); +} + +- (void)setSecondFilterFBO; +{ + CGSize currentFBOSize = [self sizeOfFBO]; + if (!secondFilterFramebuffer) + { + [self createFilterFBOofSize:currentFBOSize]; + [self setupFilterForSize:currentFBOSize]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + + glViewport(0, 0, 
(int)currentFBOSize.width, (int)currentFBOSize.height); +} + +@end diff --git a/GPUImage/Source/GPUImageLaplacianFilter.h b/GPUImage/Source/GPUImageLaplacianFilter.h new file mode 100644 index 0000000..267c1ba --- /dev/null +++ b/GPUImage/Source/GPUImageLaplacianFilter.h @@ -0,0 +1,5 @@ +#import "GPUImage3x3ConvolutionFilter.h" + +@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter + +@end diff --git a/GPUImage/Source/GPUImageLaplacianFilter.m b/GPUImage/Source/GPUImageLaplacianFilter.m new file mode 100644 index 0000000..98b7850 --- /dev/null +++ b/GPUImage/Source/GPUImageLaplacianFilter.m @@ -0,0 +1,115 @@ +#import "GPUImageLaplacianFilter.h" +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + uniform mediump mat3 convolutionMatrix; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb; + mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb; + mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb; + mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb; + + mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2]; + resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2]; + resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2]; + + // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace + resultColor = resultColor + 0.5; + + gl_FragColor = vec4(resultColor, centerColor.a); + } +); +#else +NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + uniform mat3 convolutionMatrix; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb; + vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb; + vec4 centerColor = texture2D(inputImageTexture, 
textureCoordinate); + vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb; + vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb; + + vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2]; + resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2]; + resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2]; + + // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace + resultColor = resultColor + 0.5; + + gl_FragColor = vec4(resultColor, centerColor.a); + } +); +#endif + +@implementation GPUImageLaplacianFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLaplacianFragmentShaderString])) + { + return nil; + } + + GPUMatrix3x3 newConvolutionMatrix; + newConvolutionMatrix.one.one = 0.5; + newConvolutionMatrix.one.two = 1.0; + newConvolutionMatrix.one.three = 0.5; + + newConvolutionMatrix.two.one = 1.0; + newConvolutionMatrix.two.two = -6.0; + newConvolutionMatrix.two.three = 1.0; + + newConvolutionMatrix.three.one = 0.5; + newConvolutionMatrix.three.two = 1.0; + newConvolutionMatrix.three.three = 0.5; + + self.convolutionKernel = newConvolutionMatrix; + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageLevelsFilter.h b/GPUImage/Source/GPUImageLevelsFilter.h new file mode 100644 index 0000000..d0948fb --- /dev/null +++ b/GPUImage/Source/GPUImageLevelsFilter.h @@ -0,0 +1,45 @@ +#import "GPUImageFilter.h" + +/** + * Levels like Photoshop. + * + * The min, max, minOut and maxOut parameters are floats in the range [0, 1]. + * If you have parameters from Photoshop in the range [0, 255] you must first + * convert them to be [0, 1]. + * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop. + * + * If you want to apply levels to RGB as well as individual channels you need to use + * this filter twice - first for the individual channels and then for all channels. 
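+ *
+ * As a sketch, the per-channel mapping (this matches the LevelsControl macros
+ * defined in GPUImageLevelsFilter.m) is:
+ *   ranged = clamp((color - min) / (max - min), 0.0, 1.0)
+ *   output = mix(minOut, maxOut, pow(ranged, 1.0 / gamma))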
+ */ +@interface GPUImageLevelsFilter : GPUImageFilter +{ + GLint minUniform; + GLint midUniform; + GLint maxUniform; + GLint minOutputUniform; + GLint maxOutputUniform; + + GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector; +} + +/** Set levels for the red channel */ +- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut; + +- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max; + +/** Set levels for the green channel */ +- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut; + +- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max; + +/** Set levels for the blue channel */ +- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut; + +- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max; + +/** Set levels for all channels at once */ +- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut; +- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max; + +@end + diff --git a/GPUImage/Source/GPUImageLevelsFilter.m b/GPUImage/Source/GPUImageLevelsFilter.m new file mode 100644 index 0000000..158815f --- /dev/null +++ b/GPUImage/Source/GPUImageLevelsFilter.m @@ -0,0 +1,152 @@ +#import "GPUImageLevelsFilter.h" + +/* + ** Gamma correction + ** Details: http://blog.mouaif.org/2009/01/22/photoshop-gamma-correction-shader/ + */ + +#define GammaCorrection(color, gamma) pow(color, 1.0 / gamma) + +/* + ** Levels control (input (+gamma), output) + ** Details: http://blog.mouaif.org/2009/01/28/levels-control-shader/ + */ + +#define LevelsControlInputRange(color, minInput, maxInput) min(max(color - minInput, vec3(0.0)) / (maxInput - minInput), vec3(1.0)) +#define LevelsControlInput(color, minInput, gamma, maxInput) GammaCorrection(LevelsControlInputRange(color, minInput, maxInput), gamma) +#define LevelsControlOutputRange(color, minOutput, maxOutput) mix(minOutput, maxOutput, color) +#define LevelsControl(color, minInput, gamma, maxInput, minOutput, maxOutput) LevelsControlOutputRange(LevelsControlInput(color, minInput, gamma, maxInput), minOutput, maxOutput) + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform mediump vec3 levelMinimum; + uniform mediump vec3 levelMiddle; + uniform mediump vec3 levelMaximum; + uniform mediump vec3 minOutput; + uniform mediump vec3 maxOutput; + + void main() + { + mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a); + } +); +#else +NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform vec3 levelMinimum; + uniform vec3 levelMiddle; + uniform vec3 levelMaximum; + uniform vec3 minOutput; + uniform vec3 maxOutput; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a); + } +); +#endif + +@implementation GPUImageLevelsFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- 
(id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLevelsFragmentShaderString])) + { + return nil; + } + + minUniform = [filterProgram uniformIndex:@"levelMinimum"]; + midUniform = [filterProgram uniformIndex:@"levelMiddle"]; + maxUniform = [filterProgram uniformIndex:@"levelMaximum"]; + minOutputUniform = [filterProgram uniformIndex:@"minOutput"]; + maxOutputUniform = [filterProgram uniformIndex:@"maxOutput"]; + + [self setRedMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0]; + [self setGreenMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0]; + [self setBlueMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0]; + + return self; +} + +#pragma mark - +#pragma mark Helpers + +- (void)updateUniforms { + [self setVec3:minVector forUniform:minUniform program:filterProgram]; + [self setVec3:midVector forUniform:midUniform program:filterProgram]; + [self setVec3:maxVector forUniform:maxUniform program:filterProgram]; + [self setVec3:minOutputVector forUniform:minOutputUniform program:filterProgram]; + [self setVec3:maxOutputVector forUniform:maxOutputUniform program:filterProgram]; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut { + [self setRedMin:min gamma:mid max:max minOut:minOut maxOut:maxOut]; + [self setGreenMin:min gamma:mid max:max minOut:minOut maxOut:maxOut]; + [self setBlueMin:min gamma:mid max:max minOut:minOut maxOut:maxOut]; +} + +- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max { + [self setMin:min gamma:mid max:max minOut:0.0 maxOut:1.0]; +} + +- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut { + minVector.one = min; + midVector.one = mid; + maxVector.one = max; + minOutputVector.one = minOut; + maxOutputVector.one = maxOut; + + [self updateUniforms]; +} + +- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max { + [self setRedMin:min gamma:mid max:max minOut:0.0 maxOut:1.0]; +} + +- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut { + minVector.two = min; + midVector.two = mid; + maxVector.two = max; + minOutputVector.two = minOut; + maxOutputVector.two = maxOut; + + [self updateUniforms]; +} + +- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max { + [self setGreenMin:min gamma:mid max:max minOut:0.0 maxOut:1.0]; +} + +- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut { + minVector.three = min; + midVector.three = mid; + maxVector.three = max; + minOutputVector.three = minOut; + maxOutputVector.three = maxOut; + + [self updateUniforms]; +} + +- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max { + [self setBlueMin:min gamma:mid max:max minOut:0.0 maxOut:1.0]; +} + +@end + diff --git a/GPUImage/Source/GPUImageLightenBlendFilter.h b/GPUImage/Source/GPUImageLightenBlendFilter.h new file mode 100755 index 0000000..b0287c1 --- /dev/null +++ b/GPUImage/Source/GPUImageLightenBlendFilter.h @@ -0,0 +1,8 @@ +#import "GPUImageTwoInputFilter.h" + +/// Blends two images by taking the maximum value of each color component between the images +@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageLightenBlendFilter.m b/GPUImage/Source/GPUImageLightenBlendFilter.m new file mode 100755 index 0000000..2bbd4b2 --- /dev/null +++ 
b/GPUImage/Source/GPUImageLightenBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageLightenBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = max(textureColor, textureColor2); + } +); +#else +NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = max(textureColor, textureColor2); + } + ); +#endif + +@implementation GPUImageLightenBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageLineGenerator.h b/GPUImage/Source/GPUImageLineGenerator.h new file mode 100644 index 0000000..4c46736 --- /dev/null +++ b/GPUImage/Source/GPUImageLineGenerator.h @@ -0,0 +1,18 @@ +#import "GPUImageFilter.h" + +@interface GPUImageLineGenerator : GPUImageFilter +{ + GLint lineWidthUniform, lineColorUniform; + GLfloat *lineCoordinates; +} + +// The width of the displayed lines, in pixels. The default is 1. +@property(readwrite, nonatomic) CGFloat lineWidth; + +// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0). 
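+// (Alpha is fixed at 1.0 by the generator's fragment shader.)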
+- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +// Rendering +- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime; + +@end diff --git a/GPUImage/Source/GPUImageLineGenerator.m b/GPUImage/Source/GPUImageLineGenerator.m new file mode 100644 index 0000000..513250e --- /dev/null +++ b/GPUImage/Source/GPUImageLineGenerator.m @@ -0,0 +1,163 @@ +#import "GPUImageLineGenerator.h" + +NSString *const kGPUImageLineGeneratorVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + void main() + { + gl_Position = position; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING +( + uniform lowp vec3 lineColor; + + void main() + { + gl_FragColor = vec4(lineColor, 1.0); + } +); +#else +NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING +( + uniform vec3 lineColor; + + void main() + { + gl_FragColor = vec4(lineColor, 1.0); + } +); +#endif + +@interface GPUImageLineGenerator() + +- (void)generateLineCoordinates; + +@end + +@implementation GPUImageLineGenerator + +@synthesize lineWidth = _lineWidth; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageLineGeneratorVertexShaderString fragmentShaderFromString:kGPUImageLineGeneratorFragmentShaderString])) + { + return nil; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"]; + lineColorUniform = [filterProgram uniformIndex:@"lineColor"]; + + self.lineWidth = 1.0; + [self setLineColorRed:0.0 green:1.0 blue:0.0]; + }); + + return self; +} + +- (void)dealloc +{ + if (lineCoordinates) + { + free(lineCoordinates); + } +} + +#pragma mark - +#pragma mark Rendering + +- (void)generateLineCoordinates; +{ + lineCoordinates = calloc(1024 * 4, sizeof(GLfloat)); +} + +- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime; +{ + if (self.preventRendering) + { + return; + } + + if (lineCoordinates == NULL) + { + [self generateLineCoordinates]; + } + + // Iterate through and generate vertices from the slopes and intercepts + NSUInteger currentVertexIndex = 0; + NSUInteger currentLineIndex = 0; + NSUInteger maxLineIndex = numberOfLines *2; + while(currentLineIndex < maxLineIndex) + { + GLfloat slope = lineSlopeAndIntercepts[currentLineIndex++]; + GLfloat intercept = lineSlopeAndIntercepts[currentLineIndex++]; + + if (slope > 9000.0) // Vertical line + { + lineCoordinates[currentVertexIndex++] = intercept; + lineCoordinates[currentVertexIndex++] = -1.0; + lineCoordinates[currentVertexIndex++] = intercept; + lineCoordinates[currentVertexIndex++] = 1.0; + } + else + { + lineCoordinates[currentVertexIndex++] = -1.0; + lineCoordinates[currentVertexIndex++] = slope * -1.0 + intercept; + lineCoordinates[currentVertexIndex++] = 1.0; + lineCoordinates[currentVertexIndex++] = slope * 1.0 + intercept; + } + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + + [self setFilterFBO]; + + glClearColor(0.0, 0.0, 0.0, 0.0); + glClear(GL_COLOR_BUFFER_BIT); + + glBlendEquation(GL_FUNC_ADD); + glBlendFunc(GL_ONE, GL_ONE); + glEnable(GL_BLEND); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates); + glDrawArrays(GL_LINES, 0, ((unsigned int)numberOfLines * 
2)); + + glDisable(GL_BLEND); + + [self informTargetsAboutNewFrameAtTime:frameTime]; + }); +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // Prevent rendering of the frame by normal means +} + +#pragma mark - +#pragma mark Accessors + +- (void)setLineWidth:(CGFloat)newValue; +{ + _lineWidth = newValue; + [GPUImageContext setActiveShaderProgram:filterProgram]; + glLineWidth(newValue); +} + +- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 lineColor = {redComponent, greenComponent, blueComponent}; + + [self setVec3:lineColor forUniform:lineColorUniform program:filterProgram]; +} + + +@end diff --git a/GPUImage/Source/GPUImageLinearBurnBlendFilter.h b/GPUImage/Source/GPUImageLinearBurnBlendFilter.h new file mode 100644 index 0000000..7e5e415 --- /dev/null +++ b/GPUImage/Source/GPUImageLinearBurnBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageLinearBurnBlendFilter.m b/GPUImage/Source/GPUImageLinearBurnBlendFilter.m new file mode 100644 index 0000000..4703196 --- /dev/null +++ b/GPUImage/Source/GPUImageLinearBurnBlendFilter.m @@ -0,0 +1,51 @@ +#import "GPUImageLinearBurnBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a); + } +); +#else +NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a); + } +); +#endif + +@implementation GPUImageLinearBurnBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageLocalBinaryPatternFilter.h b/GPUImage/Source/GPUImageLocalBinaryPatternFilter.h new file mode 100644 index 0000000..431dbbd --- /dev/null +++ b/GPUImage/Source/GPUImageLocalBinaryPatternFilter.h @@ -0,0 +1,5 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter + +@end diff --git a/GPUImage/Source/GPUImageLocalBinaryPatternFilter.m b/GPUImage/Source/GPUImageLocalBinaryPatternFilter.m new file mode 100644 index 0000000..1ee1f8d --- /dev/null +++ b/GPUImage/Source/GPUImageLocalBinaryPatternFilter.m @@ -0,0 +1,123 @@ +#import "GPUImageLocalBinaryPatternFilter.h" + +// This is based on "Accelerating image recognition on mobile devices using GPGPU" by 
Miguel Bordallo Lopez, Henri Nykanen, Jari Hannuksela, Olli Silven and Markku Vehvilainen +// http://www.ee.oulu.fi/~jhannuks/publications/SPIE2011a.pdf + +// Right pixel is the most significant bit, traveling clockwise to get to the upper right, which is the least significant +// If the external pixel is greater than or equal to the center, set to 1, otherwise 0 +// +// 2 1 0 +// 3 7 +// 4 5 6 + +// 01101101 +// 76543210 + +@implementation GPUImageLocalBinaryPatternFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r; + lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + + lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity); + byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity); + byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity); + byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity); + byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity); + byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity); + byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity); + byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity); + + // TODO: Replace the above with a dot product and two vec4s + // TODO: Apply step to a matrix, rather than individually + + gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0); + } +); +#else +NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r; + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, 
bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + + float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity); + byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity); + byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity); + byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity); + byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity); + byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity); + byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity); + byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity); + + // TODO: Replace the above with a dot product and two vec4s + // TODO: Apply step to a matrix, rather than individually + + gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLocalBinaryPatternFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageLookupFilter.h b/GPUImage/Source/GPUImageLookupFilter.h new file mode 100644 index 0000000..f148704 --- /dev/null +++ b/GPUImage/Source/GPUImageLookupFilter.h @@ -0,0 +1,28 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageLookupFilter : GPUImageTwoInputFilter + +// How To Use: +// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources. +// For this to work properly, each pixel color must not depend on other pixels (e.g. blur will not work). +// If you need a more complex filter you can create as many lookup tables as required. +// E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png +// 2) Use your new lookup.png file as a second input for GPUImageLookupFilter. + +// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for examples. 
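+
+// A minimal usage sketch (this assumes the standard GPUImagePicture source; the
+// image variables here are placeholders, not part of this header):
+// GPUImagePicture *imageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
+// GPUImagePicture *lookupSource = [[GPUImagePicture alloc] initWithImage:lookupImage];
+// GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
+// [imageSource addTarget:lookupFilter];
+// [lookupSource addTarget:lookupFilter]; // becomes inputImageTexture2, the lookup table
+// [lookupSource processImage];
+// [imageSource processImage];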
+ +// Additional Info: +// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors: +//for (int by = 0; by < 8; by++) { +// for (int bx = 0; bx < 8; bx++) { +// for (int g = 0; g < 64; g++) { +// for (int r = 0; r < 64; r++) { +// image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5), +// (int)(g * 255.0 / 63.0 + 0.5), +// (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5))); +// } +// } +// } +//} + +@end diff --git a/GPUImage/Source/GPUImageLookupFilter.m b/GPUImage/Source/GPUImageLookupFilter.m new file mode 100644 index 0000000..23f45b9 --- /dev/null +++ b/GPUImage/Source/GPUImageLookupFilter.m @@ -0,0 +1,93 @@ +#import "GPUImageLookupFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; // TODO: This is not used + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; // lookup texture + + void main() + { + highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + highp float blueColor = textureColor.b * 63.0; + + highp vec2 quad1; + quad1.y = floor(floor(blueColor) / 8.0); + quad1.x = floor(blueColor) - (quad1.y * 8.0); + + highp vec2 quad2; + quad2.y = floor(ceil(blueColor) / 8.0); + quad2.x = ceil(blueColor) - (quad2.y * 8.0); + + highp vec2 texPos1; + texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r); + texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g); + + highp vec2 texPos2; + texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r); + texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g); + + lowp vec4 newColor1 = texture2D(inputImageTexture2, texPos1); + lowp vec4 newColor2 = texture2D(inputImageTexture2, texPos2); + + lowp vec4 newColor = mix(newColor1, newColor2, fract(blueColor)); + gl_FragColor = vec4(newColor.rgb, textureColor.w); + } +); +#else +NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; // TODO: This is not used + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; // lookup texture + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + float blueColor = textureColor.b * 63.0; + + vec2 quad1; + quad1.y = floor(floor(blueColor) / 8.0); + quad1.x = floor(blueColor) - (quad1.y * 8.0); + + vec2 quad2; + quad2.y = floor(ceil(blueColor) / 8.0); + quad2.x = ceil(blueColor) - (quad2.y * 8.0); + + vec2 texPos1; + texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r); + texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g); + + vec2 texPos2; + texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r); + texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g); + + vec4 newColor1 = texture2D(inputImageTexture2, texPos1); + vec4 newColor2 = texture2D(inputImageTexture2, texPos2); + + vec4 newColor = mix(newColor1, newColor2, fract(blueColor)); + gl_FragColor = vec4(newColor.rgb, textureColor.w); + } +); +#endif + +@implementation GPUImageLookupFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLookupFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git 
a/GPUImage/Source/GPUImageLowPassFilter.h b/GPUImage/Source/GPUImageLowPassFilter.h new file mode 100644 index 0000000..be5c397 --- /dev/null +++ b/GPUImage/Source/GPUImageLowPassFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilterGroup.h" +#import "GPUImageBuffer.h" +#import "GPUImageDissolveBlendFilter.h" + +@interface GPUImageLowPassFilter : GPUImageFilterGroup +{ + GPUImageBuffer *bufferFilter; + GPUImageDissolveBlendFilter *dissolveBlendFilter; +} + +// This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5. +@property(readwrite, nonatomic) CGFloat filterStrength; + +@end diff --git a/GPUImage/Source/GPUImageLowPassFilter.m b/GPUImage/Source/GPUImageLowPassFilter.m new file mode 100644 index 0000000..541ba75 --- /dev/null +++ b/GPUImage/Source/GPUImageLowPassFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageLowPassFilter.h" + +@implementation GPUImageLowPassFilter + +@synthesize filterStrength; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // Take in the frame and blend it with the previous one + dissolveBlendFilter = [[GPUImageDissolveBlendFilter alloc] init]; + [self addFilter:dissolveBlendFilter]; + + // Buffer the result to be fed back into the blend + bufferFilter = [[GPUImageBuffer alloc] init]; + [self addFilter:bufferFilter]; + + // Texture location 0 needs to be the original image for the dissolve blend + [bufferFilter addTarget:dissolveBlendFilter atTextureLocation:1]; + [dissolveBlendFilter addTarget:bufferFilter]; + + [dissolveBlendFilter disableSecondFrameCheck]; + + // To prevent double updating of this filter, disable updates from the sharp image side + // self.inputFilterToIgnoreForUpdates = unsharpMaskFilter; + + self.initialFilters = [NSArray arrayWithObject:dissolveBlendFilter]; + self.terminalFilter = dissolveBlendFilter; + + self.filterStrength = 0.5; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setFilterStrength:(CGFloat)newValue; +{ + dissolveBlendFilter.mix = newValue; +} + +- (CGFloat)filterStrength; +{ + return dissolveBlendFilter.mix; +} + +@end diff --git a/GPUImage/Source/GPUImageLuminanceRangeFilter.h b/GPUImage/Source/GPUImageLuminanceRangeFilter.h new file mode 100644 index 0000000..5a31037 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminanceRangeFilter.h @@ -0,0 +1,12 @@ +#import "GPUImageFilter.h" + +@interface GPUImageLuminanceRangeFilter : GPUImageFilter +{ + GLint rangeReductionUniform; +} + +/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6. 
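+ Each pixel is pulled toward mid-gray in proportion to this factor; the shader
+ below computes output = color + (0.5 - luminance) * rangeReduction.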
+ */ +@property(readwrite, nonatomic) CGFloat rangeReductionFactor; + +@end diff --git a/GPUImage/Source/GPUImageLuminanceRangeFilter.m b/GPUImage/Source/GPUImageLuminanceRangeFilter.m new file mode 100644 index 0000000..5122c95 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminanceRangeFilter.m @@ -0,0 +1,76 @@ +#import "GPUImageLuminanceRangeFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float rangeReduction; + + // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham + const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + mediump float luminance = dot(textureColor.rgb, luminanceWeighting); + mediump float luminanceRatio = ((0.5 - luminance) * rangeReduction); + + gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w); + } +); +#else +NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float rangeReduction; + + // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham + const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, luminanceWeighting); + float luminanceRatio = ((0.5 - luminance) * rangeReduction); + + gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w); + } +); +#endif + +@implementation GPUImageLuminanceRangeFilter + +@synthesize rangeReductionFactor = _rangeReductionFactor; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceRangeFragmentShaderString])) + { + return nil; + } + + rangeReductionUniform = [filterProgram uniformIndex:@"rangeReduction"]; + self.rangeReductionFactor = 0.6; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setRangeReductionFactor:(CGFloat)newValue; +{ + _rangeReductionFactor = newValue; + + [self setFloat:_rangeReductionFactor forUniform:rangeReductionUniform program:filterProgram]; +} + + +@end diff --git a/GPUImage/Source/GPUImageLuminanceThresholdFilter.h b/GPUImage/Source/GPUImageLuminanceThresholdFilter.h new file mode 100755 index 0000000..0abb9a1 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminanceThresholdFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilter.h" + +/** Pixels with a luminance above the threshold will appear white, and those below will be black + */ +@interface GPUImageLuminanceThresholdFilter : GPUImageFilter +{ + GLint thresholdUniform; +} + +/** Anything above this luminance will be white, and anything below black. 
Ranges from 0.0 to 1.0, with 0.5 as the default + */ +@property(readwrite, nonatomic) CGFloat threshold; + +@end diff --git a/GPUImage/Source/GPUImageLuminanceThresholdFilter.m b/GPUImage/Source/GPUImageLuminanceThresholdFilter.m new file mode 100755 index 0000000..368b8fb --- /dev/null +++ b/GPUImage/Source/GPUImageLuminanceThresholdFilter.m @@ -0,0 +1,74 @@ +#import "GPUImageLuminanceThresholdFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform highp float threshold; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + highp float luminance = dot(textureColor.rgb, W); + highp float thresholdResult = step(threshold, luminance); + + gl_FragColor = vec4(vec3(thresholdResult), textureColor.w); + } +); +#else +NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float threshold; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, W); + float thresholdResult = step(threshold, luminance); + + gl_FragColor = vec4(vec3(thresholdResult), textureColor.w); + } +); +#endif + +@implementation GPUImageLuminanceThresholdFilter + +@synthesize threshold = _threshold; + +#pragma mark - +#pragma mark Initialization + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceThresholdFragmentShaderString])) + { + return nil; + } + + thresholdUniform = [filterProgram uniformIndex:@"threshold"]; + self.threshold = 0.5; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setThreshold:(CGFloat)newValue; +{ + _threshold = newValue; + + [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageLuminosity.h b/GPUImage/Source/GPUImageLuminosity.h new file mode 100644 index 0000000..b2d2458 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminosity.h @@ -0,0 +1,17 @@ +#import "GPUImageAverageColor.h" + +@interface GPUImageLuminosity : GPUImageAverageColor +{ + GLProgram *secondFilterProgram; + GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute; + GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2; + GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform; +} + +// This block is called on the completion of color averaging for a frame +@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime); + +- (void)extractLuminosityAtFrameTime:(CMTime)frameTime; +- (void)initializeSecondaryAttributes; + +@end diff --git a/GPUImage/Source/GPUImageLuminosity.m b/GPUImage/Source/GPUImageLuminosity.m new file mode 100644 index 0000000..4b6caa0 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminosity.m @@ -0,0 +1,327 @@ +#import "GPUImageLuminosity.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + varying highp vec2 outputTextureCoordinate; + + varying highp vec2 upperLeftInputTextureCoordinate; + varying highp vec2 
upperRightInputTextureCoordinate; + varying highp vec2 lowerLeftInputTextureCoordinate; + varying highp vec2 lowerRightInputTextureCoordinate; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + highp float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W); + highp float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W); + highp float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W); + highp float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W); + + highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance); + gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0); + } +); + +NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + varying highp vec2 outputTextureCoordinate; + + varying highp vec2 upperLeftInputTextureCoordinate; + varying highp vec2 upperRightInputTextureCoordinate; + varying highp vec2 lowerLeftInputTextureCoordinate; + varying highp vec2 lowerRightInputTextureCoordinate; + + void main() + { + highp float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r; + highp float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r; + highp float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r; + highp float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r; + + highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance); + gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0); + } +); +#else +NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 outputTextureCoordinate; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W); + float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W); + float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W); + float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W); + + float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance); + gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0); + } +); + +NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 outputTextureCoordinate; + + varying vec2 upperLeftInputTextureCoordinate; + varying vec2 upperRightInputTextureCoordinate; + varying vec2 lowerLeftInputTextureCoordinate; + varying vec2 lowerRightInputTextureCoordinate; + + void main() + { + float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r; + float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r; + float lowerLeftLuminance = 
texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
+     float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
+
+     float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
+     gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
+ }
+);
+#endif
+
+@implementation GPUImageLuminosity
+
+@synthesize luminosityProcessingFinishedBlock = _luminosityProcessingFinishedBlock;
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)init;
+{
+    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageInitialLuminosityFragmentShaderString]))
+    {
+        return nil;
+    }
+
+    texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
+    texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
+
+    stageTextures = [[NSMutableArray alloc] init];
+    stageFramebuffers = [[NSMutableArray alloc] init];
+    stageSizes = [[NSMutableArray alloc] init];
+
+    __unsafe_unretained GPUImageLuminosity *weakSelf = self;
+    [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
+        [weakSelf extractLuminosityAtFrameTime:frameTime];
+    }];
+
+    runSynchronouslyOnVideoProcessingQueue(^{
+        [GPUImageContext useImageProcessingContext];
+
+        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageColorAveragingVertexShaderString fragmentShaderString:kGPUImageLuminosityFragmentShaderString];
+
+        if (!secondFilterProgram.initialized)
+        {
+            [self initializeSecondaryAttributes];
+
+            if (![secondFilterProgram link])
+            {
+                NSString *progLog = [secondFilterProgram programLog];
+                NSLog(@"Program link log: %@", progLog);
+                NSString *fragLog = [secondFilterProgram fragmentShaderLog];
+                NSLog(@"Fragment shader compile log: %@", fragLog);
+                NSString *vertLog = [secondFilterProgram vertexShaderLog];
+                NSLog(@"Vertex shader compile log: %@", vertLog);
+                secondFilterProgram = nil;
+                NSAssert(NO, @"Filter shader link failed");
+            }
+        }
+
+        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
+        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
+        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This assumes a name of "inputImageTexture" for the fragment shader
+        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This assumes a name of "inputImageTexture2" for the second input texture in the fragment shader
+
+        secondFilterTexelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"];
+        secondFilterTexelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"];
+
+        [GPUImageContext setActiveShaderProgram:secondFilterProgram];
+
+        glEnableVertexAttribArray(secondFilterPositionAttribute);
+        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
+    });
+
+    return self;
+}
+
+- (void)initializeSecondaryAttributes;
+{
+    [secondFilterProgram addAttribute:@"position"];
+    [secondFilterProgram addAttribute:@"inputTextureCoordinate"];
+}
+
+- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
+{
+    if (self.preventRendering)
+    {
+        return;
+    }
+
+    // Do an initial render pass that both converts to luminance and reduces
+    [GPUImageContext setActiveShaderProgram:filterProgram];
+
+    
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:0] intValue]; + glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGSize currentStageSize = [[stageSizes objectAtIndex:0] CGSizeValue]; +#else + NSSize currentStageSize = [[stageSizes objectAtIndex:0] sizeValue]; +#endif + glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height); + + GLuint currentTexture = sourceTexture; + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, currentTexture); + + glUniform1i(filterInputTextureUniform, 2); + + glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width); + glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + currentTexture = [[stageTextures objectAtIndex:0] intValue]; + + // Just perform reductions from this point on + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + NSUInteger numberOfStageFramebuffers = [stageFramebuffers count]; + for (NSUInteger currentStage = 1; currentStage < numberOfStageFramebuffers; currentStage++) + { + currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue]; + glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + currentStageSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue]; +#else + currentStageSize = [[stageSizes objectAtIndex:currentStage] sizeValue]; +#endif + glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, currentTexture); + + glUniform1i(secondFilterInputTextureUniform, 2); + + glUniform1f(secondFilterTexelWidthUniform, 0.5 / currentStageSize.width); + glUniform1f(secondFilterTexelHeightUniform, 0.5 / currentStageSize.height); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + currentTexture = [[stageTextures objectAtIndex:currentStage] intValue]; + +// NSUInteger totalBytesForImage = (int)currentStageSize.width * (int)currentStageSize.height * 4; +// GLubyte *rawImagePixels2 = (GLubyte *)malloc(totalBytesForImage); +// glReadPixels(0, 0, (int)currentStageSize.width, (int)currentStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels2); +// CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels2, totalBytesForImage, NULL); +// CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB(); +// +// CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f; +// NSUInteger totalNumberOfPixels = totalBytesForImage / 4; +// +// for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++) +// { +// currentRedTotal += (CGFloat)rawImagePixels2[(currentPixel * 4)] / 255.0f; +// currentGreenTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 1] / 255.0f; +// currentBlueTotal += (CGFloat)rawImagePixels2[(currentPixel * 4 + 2)] / 255.0f; +// currentAlphaTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) 
+ 3] / 255.0f; +// } +// +// NSLog(@"Stage %d average image red: %f, green: %f, blue: %f, alpha: %f", currentStage, currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels); +// +// +// CGImageRef cgImageFromBytes = CGImageCreate((int)currentStageSize.width, (int)currentStageSize.height, 8, 32, 4 * (int)currentStageSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault); +// +// UIImage *imageToSave = [UIImage imageWithCGImage:cgImageFromBytes]; +// +// NSData *dataForPNGFile = UIImagePNGRepresentation(imageToSave); +// +// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); +// NSString *documentsDirectory = [paths objectAtIndex:0]; +// +// NSString *imageName = [NSString stringWithFormat:@"AverageLevel%d.png", currentStage]; +// NSError *error = nil; +// if (![dataForPNGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:imageName] options:NSAtomicWrite error:&error]) +// { +// return; +// } + } +} + +#pragma mark - +#pragma mark Callbacks + +- (void)extractLuminosityAtFrameTime:(CMTime)frameTime; +{ + // we need a normal color texture for this filter + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGSize finalStageSize = [[stageSizes lastObject] CGSizeValue]; +#else + NSSize finalStageSize = [[stageSizes lastObject] sizeValue]; +#endif + NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height); + + if (rawImagePixels == NULL) + { + rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4); + } + + glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + + NSUInteger luminanceTotal = 0; + NSUInteger byteIndex = 0; + for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++) + { + luminanceTotal += rawImagePixels[byteIndex]; + byteIndex += 4; + } + + CGFloat normalizedLuminosityTotal = (CGFloat)luminanceTotal / (CGFloat)totalNumberOfPixels / 255.0; + + if (_luminosityProcessingFinishedBlock != NULL) + { + _luminosityProcessingFinishedBlock(normalizedLuminosityTotal, frameTime); + } +} + + +@end diff --git a/GPUImage/Source/GPUImageLuminosityBlendFilter.h b/GPUImage/Source/GPUImageLuminosityBlendFilter.h new file mode 100644 index 0000000..03b5e4c --- /dev/null +++ b/GPUImage/Source/GPUImageLuminosityBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageLuminosityBlendFilter.m b/GPUImage/Source/GPUImageLuminosityBlendFilter.m new file mode 100644 index 0000000..7e39974 --- /dev/null +++ b/GPUImage/Source/GPUImageLuminosityBlendFilter.m @@ -0,0 +1,113 @@ +#import "GPUImageLuminosityBlendFilter.h" + +/** + * Luminosity blend mode based upon pseudo code from the PDF specification. 
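+ *
+ * This is a non-separable blend: the result keeps the hue and saturation of the
+ * base color but takes the luminosity of the overlay color, i.e.
+ * result = setlum(base.rgb, lum(overlay.rgb)), composited by the overlay's alpha.
+ *
+ * Usage sketch (the `baseSource`, `overlaySource`, and `filterView` names below
+ * are illustrative, not part of this file):
+ *
+ *     GPUImageLuminosityBlendFilter *blendFilter = [[GPUImageLuminosityBlendFilter alloc] init];
+ *     [baseSource addTarget:blendFilter];      // first input: base image
+ *     [overlaySource addTarget:blendFilter];   // second input: overlay image
+ *     [blendFilter addTarget:filterView];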
+ */ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + highp float lum(lowp vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + lowp vec3 clipcolor(lowp vec3 c) { + highp float l = lum(c); + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + lowp vec3 setlum(lowp vec3 c, highp float l) { + highp float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + void main() + { + highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#else +NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + float lum(vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + vec3 clipcolor(vec3 c) { + float l = lum(c); + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + vec3 setlum(vec3 c, float l) { + float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + void main() + { + vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#endif + + +@implementation GPUImageLuminosityBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminosityBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageMaskFilter.h b/GPUImage/Source/GPUImageMaskFilter.h new file mode 100755 index 0000000..94cf064 --- /dev/null +++ b/GPUImage/Source/GPUImageMaskFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageMaskFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageMaskFilter.m b/GPUImage/Source/GPUImageMaskFilter.m new file mode 100755 index 0000000..bf3c84d --- /dev/null +++ b/GPUImage/Source/GPUImageMaskFilter.m @@ -0,0 +1,76 @@ +#import "GPUImageMaskFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMaskShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 
textureColor = texture2D(inputImageTexture, textureCoordinate);
+     lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
+
+     // Averages the mask's RGB values, and scales that value by the mask's alpha
+     //
+     // The dot product should take fewer cycles than doing an average normally
+     //
+     // Typical/ideal case: R, G, and B will be the same, and alpha will be 1.0
+     lowp float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
+
+     gl_FragColor = vec4(textureColor.xyz, newAlpha);
+//     gl_FragColor = vec4(textureColor2);
+ }
+);
+#else
+NSString *const kGPUImageMaskShaderString = SHADER_STRING
+(
+ varying vec2 textureCoordinate;
+ varying vec2 textureCoordinate2;
+
+ uniform sampler2D inputImageTexture;
+ uniform sampler2D inputImageTexture2;
+
+ void main()
+ {
+     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
+     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
+
+     // Averages the mask's RGB values, and scales that value by the mask's alpha
+     //
+     // The dot product should take fewer cycles than doing an average normally
+     //
+     // Typical/ideal case: R, G, and B will be the same, and alpha will be 1.0
+     float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
+
+     gl_FragColor = vec4(textureColor.xyz, newAlpha);
+     // gl_FragColor = vec4(textureColor2);
+ }
+);
+#endif
+
+@implementation GPUImageMaskFilter
+
+- (id)init;
+{
+    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMaskShaderString]))
+    {
+        return nil;
+    }
+
+    return self;
+}
+
+- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
+{
+    glEnable(GL_BLEND);
+    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture];
+    glDisable(GL_BLEND);
+}
+
+@end
+
diff --git a/GPUImage/Source/GPUImageMedianFilter.h b/GPUImage/Source/GPUImageMedianFilter.h
new file mode 100644
index 0000000..8022578
--- /dev/null
+++ b/GPUImage/Source/GPUImageMedianFilter.h
@@ -0,0 +1,5 @@
+#import "GPUImage3x3TextureSamplingFilter.h"
+
+@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
+
+@end
diff --git a/GPUImage/Source/GPUImageMedianFilter.m b/GPUImage/Source/GPUImageMedianFilter.m
new file mode 100644
index 0000000..78c1180
--- /dev/null
+++ b/GPUImage/Source/GPUImageMedianFilter.m
@@ -0,0 +1,178 @@
+#import "GPUImageMedianFilter.h"
+
+/*
+ 3x3 median filter, adapted from "A Fast, Small-Radius GPU Median Filter" by Morgan McGuire in ShaderX6
+ http://graphics.cs.williams.edu/papers/MedianShaderX6/
+
+ Morgan McGuire and Kyle Whitson
+ Williams College
+
+ Register allocation tips by Victor Huang Xiaohuang
+ University of Illinois at Urbana-Champaign
+
+ http://graphics.cs.williams.edu
+
+
+ Copyright (c) Morgan McGuire and Williams College, 2006
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + +#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b); +#define mn3(a, b, c) s2(a, b); s2(a, c); +#define mx3(a, b, c) s2(b, c); s2(a, c); + +#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges +#define mnmx4(a, b, c, d) s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges +#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges +#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges + + void main() + { + vec3 v[6]; + + v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb; + v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb; + v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb; + v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb; + v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb; +// v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; +// v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb; + vec3 temp; + + mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + + mnmx5(v[1], v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb; + + mnmx4(v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, textureCoordinate).rgb; + + mnmx3(v[3], v[4], v[5]); + + gl_FragColor = vec4(v[4], 1.0); + } +); +#else +NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + +#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b); +#define mn3(a, b, c) s2(a, b); s2(a, c); +#define mx3(a, b, c) s2(b, c); s2(a, c); + +#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges +#define mnmx4(a, b, c, d) 
s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges +#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges +#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges + + void main() + { + vec3 v[6]; + + v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb; + v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb; + v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb; + v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb; + v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + // v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + // v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb; + vec3 temp; + + mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + + mnmx5(v[1], v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb; + + mnmx4(v[2], v[3], v[4], v[5]); + + v[5] = texture2D(inputImageTexture, textureCoordinate).rgb; + + mnmx3(v[3], v[4], v[5]); + + gl_FragColor = vec4(v[4], 1.0); + } +); +#endif + +@implementation GPUImageMedianFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageMedianFragmentShaderString])) + { + return nil; + } + + hasOverriddenImageSizeFactor = NO; + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageMissEtikateFilter.h b/GPUImage/Source/GPUImageMissEtikateFilter.h new file mode 100755 index 0000000..de17064 --- /dev/null +++ b/GPUImage/Source/GPUImageMissEtikateFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImagePicture; + +/** A photo filter based on Photoshop action by Miss Etikate: + http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961 + */ + +// Note: If you want to use this effect you have to add lookup_miss_etikate.png +// from Resources folder to your application bundle. 
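+//
+// Usage sketch (`sourcePicture` and `filterView` are illustrative names, not
+// part of this file):
+//
+//   GPUImageMissEtikateFilter *etikateFilter = [[GPUImageMissEtikateFilter alloc] init];
+//   [sourcePicture addTarget:etikateFilter];
+//   [etikateFilter addTarget:filterView];
+//   [sourcePicture processImage];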
+ +@interface GPUImageMissEtikateFilter : GPUImageFilterGroup +{ + GPUImagePicture *lookupImageSource; +} + +@end diff --git a/GPUImage/Source/GPUImageMissEtikateFilter.m b/GPUImage/Source/GPUImageMissEtikateFilter.m new file mode 100755 index 0000000..9117ad8 --- /dev/null +++ b/GPUImage/Source/GPUImageMissEtikateFilter.m @@ -0,0 +1,43 @@ +#import "GPUImageMissEtikateFilter.h" +#import "GPUImagePicture.h" +#import "GPUImageLookupFilter.h" + +@implementation GPUImageMissEtikateFilter + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + UIImage *image = [UIImage imageNamed:@"lookup_miss_etikate.png"]; +#else + NSImage *image = [NSImage imageNamed:@"lookup_miss_etikate.png"]; +#endif + + NSAssert(image, @"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle."); + + lookupImageSource = [[GPUImagePicture alloc] initWithImage:image]; + GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init]; + [self addFilter:lookupFilter]; + + [lookupImageSource addTarget:lookupFilter atTextureLocation:1]; + [lookupImageSource processImage]; + + self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil]; + self.terminalFilter = lookupFilter; + + return self; +} + +-(void)prepareForImageCapture { + [lookupImageSource processImage]; + [super prepareForImageCapture]; +} + +#pragma mark - +#pragma mark Accessors + +@end diff --git a/GPUImage/Source/GPUImageMonochromeFilter.h b/GPUImage/Source/GPUImageMonochromeFilter.h new file mode 100644 index 0000000..66a0e77 --- /dev/null +++ b/GPUImage/Source/GPUImageMonochromeFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +@interface GPUImageMonochromeFilter : GPUImageFilter +{ + GLint intensityUniform, filterColorUniform; +} + +@property(readwrite, nonatomic) CGFloat intensity; +@property(readwrite, nonatomic) GPUVector4 color; + +- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; + +@end diff --git a/GPUImage/Source/GPUImageMonochromeFilter.m b/GPUImage/Source/GPUImageMonochromeFilter.m new file mode 100644 index 0000000..70cef87 --- /dev/null +++ b/GPUImage/Source/GPUImageMonochromeFilter.m @@ -0,0 +1,115 @@ +#import "GPUImageMonochromeFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float intensity; + uniform vec3 filterColor; + + const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + //desat, then apply overlay blend + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, luminanceWeighting); + + lowp vec4 desat = vec4(vec3(luminance), 1.0); + + //overlay + lowp vec4 outputColor = vec4( + (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))), + (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))), + (desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))), + 1.0 + ); + + //which is better, or are they equal? 
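+     // mix() fades between the untouched input color (intensity = 0.0) and the
+     // overlay-tinted monochrome color (intensity = 1.0). The overlay step above
+     // darkens values below 0.5 (2 * a * b) and screens values above it
+     // (1.0 - 2.0 * (1.0 - a) * (1.0 - b)), preserving black and white, so the
+     // filterColor tints the midtones most strongly.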
+ gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a); + } +); +#else +NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float intensity; + uniform vec3 filterColor; + + const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + //desat, then apply overlay blend + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance = dot(textureColor.rgb, luminanceWeighting); + + vec4 desat = vec4(vec3(luminance), 1.0); + + //overlay + vec4 outputColor = vec4( + (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))), + (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))), + (desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))), + 1.0 + ); + + //which is better, or are they equal? + gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a); + } +); +#endif + +@implementation GPUImageMonochromeFilter + +@synthesize intensity = _intensity; +@synthesize color = _color; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUMonochromeFragmentShaderString])) + { + return nil; + } + + intensityUniform = [filterProgram uniformIndex:@"intensity"]; + filterColorUniform = [filterProgram uniformIndex:@"filterColor"]; + + self.intensity = 1.0; + self.color = (GPUVector4){0.6f, 0.45f, 0.3f, 1.f}; + //self.color = [CIColor colorWithRed:0.6 green:0.45 blue:0.3 alpha:1.]; + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setColor:(GPUVector4)color; +{ + + _color = color; + + [self setColorRed:color.one green:color.two blue:color.three]; +} + +- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent; +{ + GPUVector3 filterColor = {redComponent, greenComponent, blueComponent}; + + [self setVec3:filterColor forUniform:filterColorUniform program:filterProgram]; +} + +- (void)setIntensity:(CGFloat)newValue; +{ + _intensity = newValue; + + [self setFloat:_intensity forUniform:intensityUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageMosaicFilter.h b/GPUImage/Source/GPUImageMosaicFilter.h new file mode 100644 index 0000000..ae829ec --- /dev/null +++ b/GPUImage/Source/GPUImageMosaicFilter.h @@ -0,0 +1,27 @@ + +// This needs a little more work, it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working + +#import "GPUImageTwoInputFilter.h" +#import "GPUImagePicture.h" + +@interface GPUImageMosaicFilter : GPUImageTwoInputFilter { + GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform; + GPUImagePicture *pic; +} + +// This filter takes an input tileset, the tiles must ascend in luminance +// It looks at the input image and replaces each display tile with an input tile +// according to the luminance of that tile. The idea was to replicate the ASCII +// video filters seen in other apps, but the tileset can be anything. 
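+//
+// Worked example of the shader's tile lookup (a sketch of the arithmetic, using
+// the default values set in -init): with numTiles = 64.0 and
+// inputTileSize = (0.125, 0.125), an (inverted) luminance of 0.5 gives
+// lumStep = 32; rowStep = 1.0 / 0.125 = 8 tiles per row, so the shader samples
+// the tile at column mod(32, 8) = 0, row floor(32 / 8) = 4 of the tileset.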
+@property(readwrite, nonatomic) CGSize inputTileSize; +@property(readwrite, nonatomic) float numTiles; +@property(readwrite, nonatomic) CGSize displayTileSize; +@property(readwrite, nonatomic) BOOL colorOn; + +- (void)setNumTiles:(float)numTiles; +- (void)setDisplayTileSize:(CGSize)displayTileSize; +- (void)setInputTileSize:(CGSize)inputTileSize; +- (void)setTileSet:(NSString *)tileSet; +- (void)setColorOn:(BOOL)yes; + +@end diff --git a/GPUImage/Source/GPUImageMosaicFilter.m b/GPUImage/Source/GPUImageMosaicFilter.m new file mode 100644 index 0000000..ebe0997 --- /dev/null +++ b/GPUImage/Source/GPUImageMosaicFilter.m @@ -0,0 +1,187 @@ +// +// GPUImageMosaicFilter.m + + +#import "GPUImageMosaicFilter.h" +#import "GPUImagePicture.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform vec2 inputTileSize; + uniform vec2 displayTileSize; + uniform float numTiles; + uniform int colorOn; + + void main() + { + vec2 xy = textureCoordinate; + xy = xy - mod(xy, displayTileSize); + + vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0); + + vec4 inputColor = texture2D(inputImageTexture2, xy); + float lum = dot(inputColor,lumcoeff); + lum = 1.0 - lum; + + float stepsize = 1.0 / numTiles; + float lumStep = (lum - mod(lum, stepsize)) / stepsize; + + float rowStep = 1.0 / inputTileSize.x; + float x = mod(lumStep, rowStep); + float y = floor(lumStep / rowStep); + + vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y); + vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize)); + + vec4 color = texture2D(inputImageTexture, finalCoord); + if (colorOn == 1) { + color = color * inputColor; + } + gl_FragColor = color; + + } +); +#else +NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform vec2 inputTileSize; + uniform vec2 displayTileSize; + uniform float numTiles; + uniform int colorOn; + + void main() + { + vec2 xy = textureCoordinate; + xy = xy - mod(xy, displayTileSize); + + vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0); + + vec4 inputColor = texture2D(inputImageTexture2, xy); + float lum = dot(inputColor,lumcoeff); + lum = 1.0 - lum; + + float stepsize = 1.0 / numTiles; + float lumStep = (lum - mod(lum, stepsize)) / stepsize; + + float rowStep = 1.0 / inputTileSize.x; + float x = mod(lumStep, rowStep); + float y = floor(lumStep / rowStep); + + vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y); + vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize)); + + vec4 color = texture2D(inputImageTexture, finalCoord); + if (colorOn == 1) { + color = color * inputColor; + } + gl_FragColor = color; + } +); +#endif + +@implementation GPUImageMosaicFilter + +@synthesize inputTileSize = _inputTileSize, numTiles = _numTiles, displayTileSize = _displayTileSize, colorOn = _colorOn; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageMosaicFragmentShaderString])) + { + return nil; + } + + inputTileSizeUniform = [filterProgram uniformIndex:@"inputTileSize"]; + displayTileSizeUniform = [filterProgram uniformIndex:@"displayTileSize"]; + numTilesUniform = [filterProgram uniformIndex:@"numTiles"]; + colorOnUniform = 
[filterProgram uniformIndex:@"colorOn"];
+
+    CGSize its = CGSizeMake(0.125, 0.125);
+    CGSize dts = CGSizeMake(0.025, 0.025);
+    [self setDisplayTileSize:dts];
+    [self setInputTileSize:its];
+    [self setNumTiles:64.0];
+    [self setColorOn:YES];
+    //[self setTileSet:@"squares.png"];
+    return self;
+}
+
+- (void)setColorOn:(BOOL)yes
+{
+    _colorOn = yes;
+    [self setInteger:(yes ? 1 : 0) forUniform:colorOnUniform program:filterProgram];
+}
+
+- (void)setNumTiles:(float)numTiles
+{
+    _numTiles = numTiles;
+    [self setFloat:_numTiles forUniformName:@"numTiles"];
+}
+
+- (void)setInputTileSize:(CGSize)inputTileSize
+{
+    // Clamp to the normalized [0.0, 1.0] texture coordinate range before storing
+    inputTileSize.width = MIN(MAX(inputTileSize.width, 0.0), 1.0);
+    inputTileSize.height = MIN(MAX(inputTileSize.height, 0.0), 1.0);
+
+    _inputTileSize = inputTileSize;
+
+    [self setSize:_inputTileSize forUniform:inputTileSizeUniform program:filterProgram];
+}
+
+- (void)setDisplayTileSize:(CGSize)displayTileSize
+{
+    // Clamp to the normalized [0.0, 1.0] texture coordinate range before storing
+    displayTileSize.width = MIN(MAX(displayTileSize.width, 0.0), 1.0);
+    displayTileSize.height = MIN(MAX(displayTileSize.height, 0.0), 1.0);
+
+    _displayTileSize = displayTileSize;
+
+    [self setSize:_displayTileSize forUniform:displayTileSizeUniform program:filterProgram];
+}
+
+- (void)setTileSet:(NSString *)tileSet
+{
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+    UIImage *img = [UIImage imageNamed:tileSet];
+#else
+    NSImage *img = [NSImage imageNamed:tileSet];
+#endif
+    pic = [[GPUImagePicture alloc] initWithImage:img smoothlyScaleOutput:YES];
+    [pic addTarget:self];
+    [pic processImage];
+}
+
+@end
diff --git a/GPUImage/Source/GPUImageMotionBlurFilter.h b/GPUImage/Source/GPUImageMotionBlurFilter.h
new file mode 100644
index 0000000..dcca712
--- /dev/null
+++ b/GPUImage/Source/GPUImageMotionBlurFilter.h
@@ -0,0 +1,13 @@
+#import "GPUImageFilter.h"
+
+@interface GPUImageMotionBlurFilter : GPUImageFilter
+
+/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 2.5
+ */
+@property (readwrite, nonatomic) CGFloat blurSize;
+
+/** The angular direction of the blur, in degrees.
0 degrees by default + */ +@property (readwrite, nonatomic) CGFloat blurAngle; + +@end diff --git a/GPUImage/Source/GPUImageMotionBlurFilter.m b/GPUImage/Source/GPUImageMotionBlurFilter.m new file mode 100644 index 0000000..5a2c20b --- /dev/null +++ b/GPUImage/Source/GPUImageMotionBlurFilter.m @@ -0,0 +1,209 @@ +#import "GPUImageMotionBlurFilter.h" + +// Override vertex shader to remove dependent texture reads +NSString *const kGPUImageTiltedTexelSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform vec2 directionalTexelStep; + + varying vec2 textureCoordinate; + varying vec2 oneStepBackTextureCoordinate; + varying vec2 twoStepsBackTextureCoordinate; + varying vec2 threeStepsBackTextureCoordinate; + varying vec2 fourStepsBackTextureCoordinate; + varying vec2 oneStepForwardTextureCoordinate; + varying vec2 twoStepsForwardTextureCoordinate; + varying vec2 threeStepsForwardTextureCoordinate; + varying vec2 fourStepsForwardTextureCoordinate; + + void main() + { + gl_Position = position; + + textureCoordinate = inputTextureCoordinate.xy; + oneStepBackTextureCoordinate = inputTextureCoordinate.xy - directionalTexelStep; + twoStepsBackTextureCoordinate = inputTextureCoordinate.xy - 2.0 * directionalTexelStep; + threeStepsBackTextureCoordinate = inputTextureCoordinate.xy - 3.0 * directionalTexelStep; + fourStepsBackTextureCoordinate = inputTextureCoordinate.xy - 4.0 * directionalTexelStep; + oneStepForwardTextureCoordinate = inputTextureCoordinate.xy + directionalTexelStep; + twoStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 2.0 * directionalTexelStep; + threeStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 3.0 * directionalTexelStep; + fourStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 4.0 * directionalTexelStep; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING +( + precision highp float; + + uniform sampler2D inputImageTexture; + + varying vec2 textureCoordinate; + varying vec2 oneStepBackTextureCoordinate; + varying vec2 twoStepsBackTextureCoordinate; + varying vec2 threeStepsBackTextureCoordinate; + varying vec2 fourStepsBackTextureCoordinate; + varying vec2 oneStepForwardTextureCoordinate; + varying vec2 twoStepsForwardTextureCoordinate; + varying vec2 threeStepsForwardTextureCoordinate; + varying vec2 fourStepsForwardTextureCoordinate; + + void main() + { + // Box weights +// lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111; +// fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111; + + lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18; + fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15; + fragmentColor += 
texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12; + fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09; + fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05; + fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15; + fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12; + fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09; + fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05; + + gl_FragColor = fragmentColor; + } +); +#else +NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 textureCoordinate; + varying vec2 oneStepBackTextureCoordinate; + varying vec2 twoStepsBackTextureCoordinate; + varying vec2 threeStepsBackTextureCoordinate; + varying vec2 fourStepsBackTextureCoordinate; + varying vec2 oneStepForwardTextureCoordinate; + varying vec2 twoStepsForwardTextureCoordinate; + varying vec2 threeStepsForwardTextureCoordinate; + varying vec2 fourStepsForwardTextureCoordinate; + + void main() + { + // Box weights + // vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111; + // fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111; + + vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18; + fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15; + fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12; + fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09; + fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05; + fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15; + fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12; + fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09; + fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05; + + gl_FragColor = fragmentColor; + } +); +#endif + +@interface GPUImageMotionBlurFilter() +{ + GLint directionalTexelStepUniform; +} + +- (void)recalculateTexelOffsets; + +@end + +@implementation GPUImageMotionBlurFilter + +@synthesize blurSize = _blurSize; +@synthesize blurAngle = _blurAngle; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageTiltedTexelSamplingVertexShaderString fragmentShaderFromString:kGPUImageMotionBlurFragmentShaderString])) + { + return nil; + } + + directionalTexelStepUniform = [filterProgram uniformIndex:@"directionalTexelStep"]; + + self.blurSize 
= 2.5; + self.blurAngle = 0.0; + + return self; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self recalculateTexelOffsets]; + } +} + +- (void)recalculateTexelOffsets; +{ + CGFloat aspectRatio = 1.0; + CGPoint texelOffsets; + + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + aspectRatio = (inputTextureSize.width / inputTextureSize.height); + texelOffsets.x = _blurSize * sin(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.height; + texelOffsets.y = _blurSize * cos(_blurAngle * M_PI / 180.0) / inputTextureSize.height; + } + else + { + aspectRatio = (inputTextureSize.height / inputTextureSize.width); + texelOffsets.x = _blurSize * cos(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.width; + texelOffsets.y = _blurSize * sin(_blurAngle * M_PI / 180.0) / inputTextureSize.width; + } + + [self setPoint:texelOffsets forUniform:directionalTexelStepUniform program:filterProgram]; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self recalculateTexelOffsets]; +} + +- (void)setBlurAngle:(CGFloat)newValue; +{ + _blurAngle = newValue; + [self recalculateTexelOffsets]; +} + +- (void)setBlurSize:(CGFloat)newValue; +{ + _blurSize = newValue; + [self recalculateTexelOffsets]; +} + + +@end diff --git a/GPUImage/Source/GPUImageMotionDetector.h b/GPUImage/Source/GPUImageMotionDetector.h new file mode 100644 index 0000000..0132914 --- /dev/null +++ b/GPUImage/Source/GPUImageMotionDetector.h @@ -0,0 +1,18 @@ +#import "GPUImageFilterGroup.h" +#import "GPUImageLowPassFilter.h" +#import "GPUImageAverageColor.h" + +@interface GPUImageMotionDetector : GPUImageFilterGroup +{ + GPUImageLowPassFilter *lowPassFilter; + GPUImageTwoInputFilter *frameComparisonFilter; + GPUImageAverageColor *averageColor; +} + +// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5. +@property(readwrite, nonatomic) CGFloat lowPassFilterStrength; + +// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity. 
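+//
+// Usage sketch (`videoCamera` stands in for any already-configured
+// GPUImageVideoCamera; the names are illustrative, not part of this file):
+//
+//   GPUImageMotionDetector *motionDetector = [[GPUImageMotionDetector alloc] init];
+//   [videoCamera addTarget:motionDetector];
+//   [motionDetector setMotionDetectionBlock:^(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime) {
+//       NSLog(@"Motion centroid: (%f, %f), intensity: %f", motionCentroid.x, motionCentroid.y, motionIntensity);
+//   }];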
+@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime); + +@end diff --git a/GPUImage/Source/GPUImageMotionDetector.m b/GPUImage/Source/GPUImageMotionDetector.m new file mode 100644 index 0000000..0e204ad --- /dev/null +++ b/GPUImage/Source/GPUImageMotionDetector.m @@ -0,0 +1,112 @@ +#import "GPUImageMotionDetector.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform highp float intensity; + + void main() + { + lowp vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb; + lowp vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb; + + mediump float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735 + lowp float movementThreshold = step(0.2, colorDistance); + + gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0); + } +); +#else +NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float intensity; + + void main() + { + vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb; + vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb; + + float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735 + float movementThreshold = step(0.2, colorDistance); + + gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0); + } +); +#endif + + +@implementation GPUImageMotionDetector + +@synthesize lowPassFilterStrength, motionDetectionBlock; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // Start with a low pass filter to define the component to be removed + lowPassFilter = [[GPUImageLowPassFilter alloc] init]; + [self addFilter:lowPassFilter]; + + // Take the difference of the current frame from the low pass filtered result to get the high pass + frameComparisonFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageMotionComparisonFragmentShaderString]; + [self addFilter:frameComparisonFilter]; + + // Texture location 0 needs to be the original image for the difference blend + [lowPassFilter addTarget:frameComparisonFilter atTextureLocation:1]; + + // End with the average color for the scene to determine the centroid + averageColor = [[GPUImageAverageColor alloc] init]; + + __unsafe_unretained GPUImageMotionDetector *weakSelf = self; + + [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime) { + if (weakSelf.motionDetectionBlock != NULL) + { + weakSelf.motionDetectionBlock(CGPointMake(redComponent / alphaComponent, greenComponent / alphaComponent), alphaComponent, frameTime); + } +// NSLog(@"Average X: %f, Y: %f total: %f", redComponent / alphaComponent, greenComponent / alphaComponent, alphaComponent); + }]; + + [frameComparisonFilter addTarget:averageColor]; + + self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, frameComparisonFilter, nil]; + 
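+    // The comparison filter is the terminal filter, so targets of this group
+    // receive the thresholded motion mask; the average color reduction hangs
+    // off of it only to drive the centroid/intensity callback above.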
self.terminalFilter = frameComparisonFilter;
+
+    self.lowPassFilterStrength = 0.5;
+
+    return self;
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (void)setLowPassFilterStrength:(CGFloat)newValue;
+{
+    lowPassFilter.filterStrength = newValue;
+}
+
+- (CGFloat)lowPassFilterStrength;
+{
+    return lowPassFilter.filterStrength;
+}
+
+
+@end
diff --git a/GPUImage/Source/GPUImageMovie.h b/GPUImage/Source/GPUImageMovie.h
new file mode 100755
index 0000000..7952e85
--- /dev/null
+++ b/GPUImage/Source/GPUImageMovie.h
@@ -0,0 +1,56 @@
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+#import "GPUImageContext.h"
+#import "GPUImageOutput.h"
+
+/** Protocol for getting a callback when a movie has finished playing.
+ */
+@protocol GPUImageMovieDelegate <NSObject>
+
+- (void)didCompletePlayingMovie;
+@end
+
+/** Source object for filtering movies
+ */
+@interface GPUImageMovie : GPUImageOutput
+
+@property (readwrite, retain) AVAsset *asset;
+@property (readwrite, retain) AVPlayerItem *playerItem;
+@property(readwrite, retain) NSURL *url;
+
+/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
+ */
+@property(readwrite, nonatomic) BOOL runBenchmark;
+
+/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
+ */
+@property(readwrite, nonatomic) BOOL playAtActualSpeed;
+
+/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
+ */
+@property(readwrite, nonatomic) BOOL shouldRepeat;
+
+/** The delegate to notify when the movie has finished playing
+ */
+@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate> delegate;
+
+@property (readonly, nonatomic) AVAssetReader *assetReader;
+@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
+@property (readonly, nonatomic) BOOL videoEncodingIsFinished;
+
+/// @name Initialization and teardown
+- (id)initWithAsset:(AVAsset *)asset;
+- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
+- (id)initWithURL:(NSURL *)url;
+- (void)textureCacheSetup;
+
+/// @name Movie processing
+- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
+- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
+- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
+- (void)startProcessing;
+- (void)endProcessing;
+- (void)cancelProcessing;
+- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
+
+@end
diff --git a/GPUImage/Source/GPUImageMovie.m b/GPUImage/Source/GPUImageMovie.m
new file mode 100755
index 0000000..178dd27
--- /dev/null
+++ b/GPUImage/Source/GPUImageMovie.m
@@ -0,0 +1,751 @@
+#import "GPUImageMovie.h"
+#import "GPUImageMovieWriter.h"
+#import "GPUImageFilter.h"
+#import "GPUImageVideoCamera.h"
+
+@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
+{
+    BOOL audioEncodingIsFinished, videoEncodingIsFinished;
+    GPUImageMovieWriter *synchronizedMovieWriter;
+    CVOpenGLESTextureCacheRef coreVideoTextureCache;
+    AVAssetReader *reader;
+    AVPlayerItemVideoOutput *playerItemOutput;
+    CADisplayLink *displayLink;
+    CMTime previousFrameTime;
+    CFAbsoluteTime previousActualFrameTime;
+    BOOL keepLooping;
+
+    GLuint luminanceTexture, chrominanceTexture;
+
+    GLProgram *yuvConversionProgram;
+    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
+    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
+    GLint yuvConversionMatrixUniform;
+    GLuint yuvConversionFramebuffer;
+    const GLfloat
*_preferredConversion; + + int imageBufferWidth, imageBufferHeight; +} + +- (void)processAsset; + +@end + +@implementation GPUImageMovie + +@synthesize url = _url; +@synthesize asset = _asset; +@synthesize runBenchmark = _runBenchmark; +@synthesize playAtActualSpeed = _playAtActualSpeed; +@synthesize delegate = _delegate; +@synthesize shouldRepeat = _shouldRepeat; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithURL:(NSURL *)url; +{ + if (!(self = [super init])) + { + return nil; + } + + [self textureCacheSetup]; + + self.url = url; + self.asset = nil; + + return self; +} + +- (id)initWithAsset:(AVAsset *)asset; +{ + if (!(self = [super init])) + { + return nil; + } + + [self textureCacheSetup]; + + self.url = nil; + self.asset = asset; + + return self; +} + +- (id)initWithPlayerItem:(AVPlayerItem *)playerItem; +{ + if (!(self = [super init])) + { + return nil; + } + + [self textureCacheSetup]; + + self.url = nil; + self.asset = nil; + self.playerItem = playerItem; + + return self; +} + +- (void)textureCacheSetup; +{ + if ([GPUImageContext supportsFastTextureUpload]) + { + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + _preferredConversion = kColorConversion709; + yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString]; + + if (!yuvConversionProgram.initialized) + { + [yuvConversionProgram addAttribute:@"position"]; + [yuvConversionProgram addAttribute:@"inputTextureCoordinate"]; + + if (![yuvConversionProgram link]) + { + NSString *progLog = [yuvConversionProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [yuvConversionProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [yuvConversionProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + yuvConversionProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"]; + yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"]; + yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"]; + yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"]; + yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"]; + + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + + glEnableVertexAttribArray(yuvConversionPositionAttribute); + glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute); + +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#endif + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Need to remove the initially created texture + [self deleteOutputTexture]; + }); + } +} + +- (void)dealloc +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [displayLink removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode]; + displayLink = nil; + }); + if ([GPUImageContext 
supportsFastTextureUpload])
+    {
+        CFRelease(coreVideoTextureCache);
+        [self destroyYUVConversionFBO];
+    }
+}
+
+#pragma mark -
+#pragma mark Movie processing
+
+- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
+{
+    synchronizedMovieWriter = movieWriter;
+    movieWriter.encodingLiveVideo = NO;
+}
+
+- (void)startProcessing
+{
+    if( self.playerItem ) {
+        [self processPlayerItem];
+        return;
+    }
+    if(self.url == nil)
+    {
+        [self processAsset];
+        return;
+    }
+
+    if (_shouldRepeat) keepLooping = YES;
+
+    previousFrameTime = kCMTimeZero;
+    previousActualFrameTime = CFAbsoluteTimeGetCurrent();
+
+    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
+    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
+
+    GPUImageMovie __block *blockSelf = self;
+
+    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
+        runSynchronouslyOnVideoProcessingQueue(^{
+            NSError *error = nil;
+            AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
+            if (tracksStatus != AVKeyValueStatusLoaded)
+            {
+                return;
+            }
+            blockSelf.asset = inputAsset;
+            [blockSelf processAsset];
+            blockSelf = nil;
+        });
+    }];
+}
+
+- (AVAssetReader*)createAssetReader
+{
+    NSError *error = nil;
+    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
+
+    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
+    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
+    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
+    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
+    [assetReader addOutput:readerVideoTrackOutput];
+
+    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
+    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
+    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
+
+    if (shouldRecordAudioTrack)
+    {
+        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
+
+        // This might need to be extended to handle movies with more than one audio track
+        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
+        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
+        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
+        [assetReader addOutput:readerAudioTrackOutput];
+    }
+
+    return assetReader;
+}
+
+- (void)processAsset
+{
+    reader = [self createAssetReader];
+
+    AVAssetReaderOutput *readerVideoTrackOutput = nil;
+    AVAssetReaderOutput *readerAudioTrackOutput = nil;
+
+    audioEncodingIsFinished = YES;
+    for( AVAssetReaderOutput *output in reader.outputs ) {
+        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
+            audioEncodingIsFinished = NO;
+            readerAudioTrackOutput = output;
+        }
+        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
+            readerVideoTrackOutput = output;
+        }
+    }
+
+    if ([reader startReading] == NO)
+    {
+        NSLog(@"Error reading from file at URL: %@", self.url);
+        return;
+    }
+
+    __unsafe_unretained GPUImageMovie *weakSelf = self;
+
+    if (synchronizedMovieWriter != nil)
+    {
+        [synchronizedMovieWriter setVideoInputReadyCallback:^{
+            return
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
+        }];
+
+        [synchronizedMovieWriter setAudioInputReadyCallback:^{
+            return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
+        }];
+
+        [synchronizedMovieWriter enableSynchronizationCallbacks];
+    }
+    else
+    {
+        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
+        {
+            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
+
+            if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
+            {
+                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
+            }
+        }
+
+        if (reader.status == AVAssetReaderStatusCompleted) {
+            [reader cancelReading];
+
+            if (keepLooping) {
+                reader = nil;
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    [self startProcessing];
+                });
+            } else {
+                [weakSelf endProcessing];
+            }
+        }
+    }
+}
+
+- (void)processPlayerItem
+{
+    runSynchronouslyOnVideoProcessingQueue(^{
+        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
+        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+        [displayLink setPaused:YES];
+
+        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
+        NSDictionary *pixBuffAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
+        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
+        [playerItemOutput setDelegate:self queue:videoProcessingQueue];
+
+        [_playerItem addOutput:playerItemOutput];
+        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
+    });
+}
+
+- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
+{
+    // Restart the display link.
+    [displayLink setPaused:NO];
+}
+
+- (void)displayLinkCallback:(CADisplayLink *)sender
+{
+    /*
+     The callback gets called once per Vsync.
+     Using the display link's timestamp and duration, we can compute the next time the screen will be refreshed and copy the pixel buffer for that time.
+     That pixel buffer can then be processed and later rendered on screen.
+    */
+    // Calculate the next Vsync time, which is when the screen will next be refreshed.
+    CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
+
+    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
+
+    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
+        __unsafe_unretained GPUImageMovie *weakSelf = self;
+        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
+        if( pixelBuffer )
+            runSynchronouslyOnVideoProcessingQueue(^{
+                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
+                CFRelease(pixelBuffer);
+            });
+    }
+}
+
+- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
+{
+    if (reader.status == AVAssetReaderStatusReading && !
videoEncodingIsFinished) + { + CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer]; + if (sampleBufferRef) + { + //NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef)))); + if (_playAtActualSpeed) + { + // Do this outside of the video processing queue to not slow that down while waiting + CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef); + CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime); + CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent(); + + CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame); + CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime; + + if (frameTimeDifference > actualTimeDifference) + { + usleep(1000000.0 * (frameTimeDifference - actualTimeDifference)); + } + + previousFrameTime = currentSampleTime; + previousActualFrameTime = CFAbsoluteTimeGetCurrent(); + } + + __unsafe_unretained GPUImageMovie *weakSelf = self; + runSynchronouslyOnVideoProcessingQueue(^{ + [weakSelf processMovieFrame:sampleBufferRef]; + CMSampleBufferInvalidate(sampleBufferRef); + CFRelease(sampleBufferRef); + }); + + return YES; + } + else + { + if (!keepLooping) { + videoEncodingIsFinished = YES; + if( videoEncodingIsFinished && audioEncodingIsFinished ) + [self endProcessing]; + } + } + } + else if (synchronizedMovieWriter != nil) + { + if (reader.status == AVAssetReaderStatusCompleted) + { + [self endProcessing]; + } + } + return NO; +} + +- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput; +{ + if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished) + { + CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer]; + if (audioSampleBufferRef) + { + //NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef)))); + [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef]; + CFRelease(audioSampleBufferRef); + return YES; + } + else + { + if (!keepLooping) { + audioEncodingIsFinished = YES; + if( videoEncodingIsFinished && audioEncodingIsFinished ) + [self endProcessing]; + } + } + } + else if (synchronizedMovieWriter != nil) + { + if (reader.status == AVAssetReaderStatusCompleted) + { + [self endProcessing]; + } + } + return NO; +} + +- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer; +{ +// CMTimeGetSeconds +// CMTimeSubtract + + CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer); + CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer); + [self processMovieFrame:movieFrame withSampleTime:currentSampleTime]; +} + +- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime +{ + int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame); +#if TARGET_IPHONE_SIMULATOR + int bufferWidth = (int) CVPixelBufferGetBytesPerRow(movieFrame) / 4; // This works around certain movie frame types on the Simulator (see https://github.com/BradLarson/GPUImage/issues/424) +#else + int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame); +#endif + CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL); + if (colorAttachments == kCVImageBufferYCbCrMatrix_ITU_R_601_4) { + _preferredConversion = 
kColorConversion601;
+    }
+    else {
+        _preferredConversion = kColorConversion709;
+    }
+
+    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
+
+    if ([GPUImageContext supportsFastTextureUpload])
+    {
+        CVOpenGLESTextureRef luminanceTextureRef = NULL;
+        CVOpenGLESTextureRef chrominanceTextureRef = NULL;
+        CVOpenGLESTextureRef texture = NULL;
+
+        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
+        {
+            // Recreate the conversion FBO if either dimension of the incoming frames has changed
+            if ( (imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight) )
+            {
+                imageBufferWidth = bufferWidth;
+                imageBufferHeight = bufferHeight;
+
+                [self destroyYUVConversionFBO];
+                [self createYUVConversionFBO];
+            }
+
+            CVReturn err;
+            // Y-plane
+            glActiveTexture(GL_TEXTURE4);
+            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
+            if (err)
+            {
+                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
+            }
+
+            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
+            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
+            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+            // UV-plane (half resolution for 4:2:0 content)
+            glActiveTexture(GL_TEXTURE5);
+            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
+            if (err)
+            {
+                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
+            }
+
+            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
+            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
+            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+            if (!allTargetsWantMonochromeData)
+            {
+                [self convertYUVToRGBOutput];
+            }
+
+            for (id<GPUImageInput> currentTarget in targets)
+            {
+                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
+                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
+
+                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
+                [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
+                [currentTarget setTextureDelegate:self atIndex:targetTextureIndex];
+
+                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
+            }
+
+            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
+            CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
+            CFRelease(luminanceTextureRef);
+            CFRelease(chrominanceTextureRef);
+        }
+        else
+        {
+            CVPixelBufferLockBaseAddress(movieFrame, 0);
+
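+            // The texture-cache call below maps the pixel buffer's BGRA contents
+            // directly into a GL_TEXTURE_2D, avoiding a CPU-side copy. For
+            // reference, a rough sketch of the copy-based upload it replaces
+            // (essentially the non-fast-upload branch at the end of this method):
+            //
+            //     glBindTexture(GL_TEXTURE_2D, outputTexture);
+            //     glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight,
+            //                  0, GL_BGRA, GL_UNSIGNED_BYTE,
+            //                  CVPixelBufferGetBaseAddress(movieFrame));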
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture); + + if (!texture || err) { + NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err); + NSAssert(NO, @"Camera failure"); + return; + } + + outputTexture = CVOpenGLESTextureGetName(texture); + // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex]; + [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex]; + [currentTarget setTextureDelegate:self atIndex:targetTextureIndex]; + + [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex]; + } + + CVPixelBufferUnlockBaseAddress(movieFrame, 0); + CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0); + CFRelease(texture); + + outputTexture = 0; + } + } + else + { + // Upload to texture + CVPixelBufferLockBaseAddress(movieFrame, 0); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + // Using BGRA extension to pull in video frame data directly + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + bufferWidth, + bufferHeight, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, + CVPixelBufferGetBaseAddress(movieFrame)); + + CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight); + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:currentSize atIndex:targetTextureIndex]; + [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex]; + } + CVPixelBufferUnlockBaseAddress(movieFrame, 0); + } + + if (_runBenchmark) + { + CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime); + } +} + +- (void)endProcessing; +{ + keepLooping = NO; + [displayLink setPaused:YES]; + + for (id currentTarget in targets) + { + [currentTarget endProcessing]; + } + + if (synchronizedMovieWriter != nil) + { + [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}]; + [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}]; + } + + if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) { + [self.delegate didCompletePlayingMovie]; + } + self.delegate = nil; +} + +- (void)cancelProcessing +{ + if (reader) { + [reader cancelReading]; + } + [self endProcessing]; +} + +- (void)convertYUVToRGBOutput; +{ + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + [self setYUVConversionFBO]; + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 
1.0f, + }; + + static const GLfloat textureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, luminanceTexture); + glUniform1i(yuvConversionLuminanceTextureUniform, 4); + + glActiveTexture(GL_TEXTURE5); + glBindTexture(GL_TEXTURE_2D, chrominanceTexture); + glUniform1i(yuvConversionChrominanceTextureUniform, 5); + + glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion); + + glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)setYUVConversionFBO; +{ + if (!yuvConversionFramebuffer) + { + [self createYUVConversionFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glViewport(0, 0, imageBufferWidth, imageBufferHeight); +} + +- (void)createYUVConversionFBO; +{ + [self initializeOutputTextureIfNeeded]; + + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &yuvConversionFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageBufferWidth, imageBufferHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + [self notifyTargetsAboutNewOutputTexture]; + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + glBindTexture(GL_TEXTURE_2D, 0); + +} + +- (void)destroyYUVConversionFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (yuvConversionFramebuffer) + { + glDeleteFramebuffers(1, &yuvConversionFramebuffer); + yuvConversionFramebuffer = 0; + } + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + }); +} + +- (AVAssetReader*)assetReader { + return reader; +} + +- (BOOL)audioEncodingIsFinished { + return audioEncodingIsFinished; +} + +- (BOOL)videoEncodingIsFinished { + return videoEncodingIsFinished; +} + +@end diff --git a/GPUImage/Source/GPUImageMovieComposition.h b/GPUImage/Source/GPUImageMovieComposition.h new file mode 100644 index 0000000..00e4381 --- /dev/null +++ b/GPUImage/Source/GPUImageMovieComposition.h @@ -0,0 +1,21 @@ +// +// GPUImageMovieComposition.h +// Givit +// +// Created by Sean Meiners on 2013/01/25. +// +// + +#import "GPUImageMovie.h" + +@interface GPUImageMovieComposition : GPUImageMovie + +@property (readwrite, retain) AVComposition *compositon; +@property (readwrite, retain) AVVideoComposition *videoComposition; +@property (readwrite, retain) AVAudioMix *audioMix; + +- (id)initWithComposition:(AVComposition*)compositon + andVideoComposition:(AVVideoComposition*)videoComposition + andAudioMix:(AVAudioMix*)audioMix; + +@end diff --git a/GPUImage/Source/GPUImageMovieComposition.m b/GPUImage/Source/GPUImageMovieComposition.m new file mode 100644 index 0000000..c71f7e1 --- /dev/null +++ b/GPUImage/Source/GPUImageMovieComposition.m @@ -0,0 +1,70 @@ +// +// GPUImageMovieComposition.m +// Givit +// +// Created by Sean Meiners on 2013/01/25. 
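+//
+// Usage sketch (illustrative only; the composition, video composition, audio
+// mix, and "filter" target below are hypothetical placeholders). Once created,
+// a composition source is driven exactly like a plain GPUImageMovie:
+//
+//     GPUImageMovieComposition *movieSource =
+//         [[GPUImageMovieComposition alloc] initWithComposition:composition
+//                                            andVideoComposition:videoComposition
+//                                                    andAudioMix:audioMix];
+//     [movieSource addTarget:filter];
+//     [movieSource startProcessing];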
+// +// + +#import "GPUImageMovieComposition.h" +#import "GPUImageMovieWriter.h" + +@implementation GPUImageMovieComposition + +@synthesize compositon = _compositon; +@synthesize videoComposition = _videoComposition; +@synthesize audioMix = _audioMix; + +- (id)initWithComposition:(AVComposition*)compositon + andVideoComposition:(AVVideoComposition*)videoComposition + andAudioMix:(AVAudioMix*)audioMix { + if (!(self = [super init])) + { + return nil; + } + + [self textureCacheSetup]; + + self.compositon = compositon; + self.videoComposition = videoComposition; + self.audioMix = audioMix; + + return self; +} + +- (AVAssetReader*)createAssetReader + { + //NSLog(@"creating reader from composition: %@, video: %@, audio: %@ with duration: %@", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration))); + + NSError *error = nil; + AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error]; + + NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}; + AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo] + videoSettings:outputSettings]; +#if ! TARGET_IPHONE_SIMULATOR + if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] ) + [(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0]; +#endif + readerVideoOutput.videoComposition = self.videoComposition; + readerVideoOutput.alwaysCopiesSampleData = NO; + [assetReader addOutput:readerVideoOutput]; + + NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio]; + BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) ); + AVAssetReaderAudioMixOutput *readerAudioOutput = nil; + + if (shouldRecordAudioTrack) + { + [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES]; + + readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil]; + readerAudioOutput.audioMix = self.audioMix; + readerAudioOutput.alwaysCopiesSampleData = NO; + [assetReader addOutput:readerAudioOutput]; + } + + return assetReader; +} + +@end diff --git a/GPUImage/Source/GPUImageMultiplyBlendFilter.h b/GPUImage/Source/GPUImageMultiplyBlendFilter.h new file mode 100755 index 0000000..5ebc28b --- /dev/null +++ b/GPUImage/Source/GPUImageMultiplyBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageMultiplyBlendFilter.m b/GPUImage/Source/GPUImageMultiplyBlendFilter.m new file mode 100755 index 0000000..ed64707 --- /dev/null +++ b/GPUImage/Source/GPUImageMultiplyBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageMultiplyBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 base = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a); + } +); +#else +NSString *const 
kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a); + } +); +#endif + +@implementation GPUImageMultiplyBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageNobleCornerDetectionFilter.h b/GPUImage/Source/GPUImageNobleCornerDetectionFilter.h new file mode 100644 index 0000000..963fd66 --- /dev/null +++ b/GPUImage/Source/GPUImageNobleCornerDetectionFilter.h @@ -0,0 +1,12 @@ +#import "GPUImageHarrisCornerDetectionFilter.h" + +/** Noble corner detector + + This is the Noble variant on the Harris detector, from + Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45. +*/ + + +@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter + +@end diff --git a/GPUImage/Source/GPUImageNobleCornerDetectionFilter.m b/GPUImage/Source/GPUImageNobleCornerDetectionFilter.m new file mode 100644 index 0000000..aa6b304 --- /dev/null +++ b/GPUImage/Source/GPUImageNobleCornerDetectionFilter.m @@ -0,0 +1,74 @@ +#import "GPUImageNobleCornerDetectionFilter.h" + +@implementation GPUImageNobleCornerDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float sensitivity; + + void main() + { + mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + mediump float derivativeSum = derivativeElements.x + derivativeElements.y; + + // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2) + mediump float zElement = (derivativeElements.z * 2.0) - 1.0; + // mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum); + mediump float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum); + + // Original Harris detector + // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2 + // highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum; + + // gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0); + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#else +NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float sensitivity; + + void main() + { + vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + float derivativeSum = derivativeElements.x + derivativeElements.y; + + // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2) + float zElement = (derivativeElements.z * 2.0) - 1.0; + // mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum); + float cornerness = 
(derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum); + + // Original Harris detector + // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2 + // highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum; + + // gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0); + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageNobleCornerDetectionFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.h b/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.h new file mode 100644 index 0000000..fd8fe6d --- /dev/null +++ b/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.h @@ -0,0 +1,5 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter + +@end diff --git a/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.m b/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.m new file mode 100644 index 0000000..1ea1316 --- /dev/null +++ b/GPUImage/Source/GPUImageNonMaximumSuppressionFilter.m @@ -0,0 +1,107 @@ +#import "GPUImageNonMaximumSuppressionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying highp vec2 textureCoordinate; + varying highp vec2 leftTextureCoordinate; + varying highp vec2 rightTextureCoordinate; + + varying highp vec2 topTextureCoordinate; + varying highp vec2 topLeftTextureCoordinate; + varying highp vec2 topRightTextureCoordinate; + + varying highp vec2 bottomTextureCoordinate; + varying highp vec2 bottomLeftTextureCoordinate; + varying highp vec2 bottomRightTextureCoordinate; + + void main() + { + lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r; + lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r; + lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r; + lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r; + lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r; + lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + + // Use a tiebreaker for pixels to the left and immediately above this one + lowp float multiplier = 1.0 - step(centerColor.r, topColor); + multiplier = multiplier * 1.0 - step(centerColor.r, topLeftColor); + multiplier = multiplier * 1.0 - step(centerColor.r, leftColor); + multiplier = multiplier * 1.0 - step(centerColor.r, bottomLeftColor); + + lowp float maxValue = max(centerColor.r, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0); + } +); +#else +NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING +( + uniform 
sampler2D inputImageTexture; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + void main() + { + float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r; + float topColor = texture2D(inputImageTexture, topTextureCoordinate).r; + float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + + // Use a tiebreaker for pixels to the left and immediately above this one + float multiplier = 1.0 - step(centerColor.r, topColor); + multiplier = multiplier * 1.0 - step(centerColor.r, topLeftColor); + multiplier = multiplier * 1.0 - step(centerColor.r, leftColor); + multiplier = multiplier * 1.0 - step(centerColor.r, bottomLeftColor); + + float maxValue = max(centerColor.r, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0); + } +); +#endif + +@implementation GPUImageNonMaximumSuppressionFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageNonMaximumSuppressionFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageNormalBlendFilter.h b/GPUImage/Source/GPUImageNormalBlendFilter.h new file mode 100644 index 0000000..ce5e22b --- /dev/null +++ b/GPUImage/Source/GPUImageNormalBlendFilter.h @@ -0,0 +1,8 @@ +// Created by Jorge Garcia on 9/5/12. +// + +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageNormalBlendFilter.m b/GPUImage/Source/GPUImageNormalBlendFilter.m new file mode 100644 index 0000000..f5b5069 --- /dev/null +++ b/GPUImage/Source/GPUImageNormalBlendFilter.m @@ -0,0 +1,96 @@ +// Created by Jorge Garcia on 9/5/12. + +#import "GPUImageNormalBlendFilter.h" +/* + This equation is a simplification of the general blending equation. It assumes the destination color is opaque, and therefore drops the destination color's alpha term. + + D = C1 * C1a + C2 * C2a * (1 - C1a) + where D is the resultant color, C1 is the color of the first element, C1a is the alpha of the first element, C2 is the second element color, C2a is the alpha of the second element. The destination alpha is calculated with: + + Da = C1a + C2a * (1 - C1a) + The resultant color is premultiplied with the alpha. To restore the color to the unmultiplied values, just divide by Da, the resultant alpha. 
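+
+ A quick worked example: compositing a half-transparent red element
+ (C1 = (1, 0, 0), C1a = 0.5) over an opaque blue one (C2 = (0, 0, 1), C2a = 1.0)
+ gives
+
+ D = (1, 0, 0) * 0.5 + (0, 0, 1) * 1.0 * (1 - 0.5) = (0.5, 0, 0.5)
+ Da = 0.5 + 1.0 * (1 - 0.5) = 1.0
+
+ so the premultiplied result (and, since Da = 1, also the unmultiplied one) is
+ an even red/blue mix, as expected.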
+ + http://stackoverflow.com/questions/1724946/blend-mode-on-a-transparent-and-semi-transparent-background + + For some reason Photoshop behaves + D = C1 + C2 * C2a * (1 - C1a) + */ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 c2 = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2); + + lowp vec4 outputColor; + +// outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a); +// outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a); +// outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a); +// outputColor.a = c1.a + c2.a * (1.0 - c1.a); + + lowp float a = c1.a + c2.a * (1.0 - c1.a); + lowp float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output + + outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.a = a; + + gl_FragColor = outputColor; + } +); +#else +NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 c2 = texture2D(inputImageTexture, textureCoordinate); + vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2); + + vec4 outputColor; + + // outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a); + // outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a); + // outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a); + // outputColor.a = c1.a + c2.a * (1.0 - c1.a); + + float a = c1.a + c2.a * (1.0 - c1.a); + float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output + + outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor; + outputColor.a = a; + + gl_FragColor = outputColor; + } +); +#endif + +@implementation GPUImageNormalBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageNormalBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageOpacityFilter.h b/GPUImage/Source/GPUImageOpacityFilter.h new file mode 100644 index 0000000..826749f --- /dev/null +++ b/GPUImage/Source/GPUImageOpacityFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageFilter.h" + +@interface GPUImageOpacityFilter : GPUImageFilter +{ + GLint opacityUniform; +} + +// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting +@property(readwrite, nonatomic) CGFloat opacity; + +@end diff --git a/GPUImage/Source/GPUImageOpacityFilter.m b/GPUImage/Source/GPUImageOpacityFilter.m new file mode 100644 index 0000000..b74acb6 --- /dev/null +++ b/GPUImage/Source/GPUImageOpacityFilter.m @@ -0,0 +1,65 @@ +#import "GPUImageOpacityFilter.h" + +@implementation GPUImageOpacityFilter + +@synthesize opacity = _opacity; + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING +( + varying highp vec2 
textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float opacity; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity); + } +); +#else +NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float opacity; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString])) + { + return nil; + } + + opacityUniform = [filterProgram uniformIndex:@"opacity"]; + self.opacity = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setOpacity:(CGFloat)newValue; +{ + _opacity = newValue; + + [self setFloat:_opacity forUniform:opacityUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageOpeningFilter.h b/GPUImage/Source/GPUImageOpeningFilter.h new file mode 100644 index 0000000..3e4f754 --- /dev/null +++ b/GPUImage/Source/GPUImageOpeningFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageErosionFilter; +@class GPUImageDilationFilter; + +// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius. +// This helps to filter out smaller bright elements. + +@interface GPUImageOpeningFilter : GPUImageFilterGroup +{ + GPUImageErosionFilter *erosionFilter; + GPUImageDilationFilter *dilationFilter; +} + +@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing; + +- (id)initWithRadius:(NSUInteger)radius; + +@end diff --git a/GPUImage/Source/GPUImageOpeningFilter.m b/GPUImage/Source/GPUImageOpeningFilter.m new file mode 100644 index 0000000..4e7a565 --- /dev/null +++ b/GPUImage/Source/GPUImageOpeningFilter.m @@ -0,0 +1,57 @@ +#import "GPUImageOpeningFilter.h" +#import "GPUImageErosionFilter.h" +#import "GPUImageDilationFilter.h" + +@implementation GPUImageOpeningFilter + +@synthesize verticalTexelSpacing = _verticalTexelSpacing; +@synthesize horizontalTexelSpacing = _horizontalTexelSpacing; + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +- (id)initWithRadius:(NSUInteger)radius; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: erosion + erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius]; + [self addFilter:erosionFilter]; + + // Second pass: dilation + dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius]; + [self addFilter:dilationFilter]; + + [erosionFilter addTarget:dilationFilter]; + + self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil]; + self.terminalFilter = dilationFilter; + + return self; +} + +- (void)setVerticalTexelSpacing:(CGFloat)newValue; +{ + _verticalTexelSpacing = newValue; + erosionFilter.verticalTexelSpacing = newValue; + dilationFilter.verticalTexelSpacing = newValue; +} + +- (void)setHorizontalTexelSpacing:(CGFloat)newValue; +{ + _horizontalTexelSpacing = newValue; + erosionFilter.horizontalTexelSpacing = newValue; + dilationFilter.horizontalTexelSpacing = newValue; +} + +@end diff --git a/GPUImage/Source/GPUImageOutput.h b/GPUImage/Source/GPUImageOutput.h new file mode 100755 index 
0000000..ad26e7b
--- /dev/null
+++ b/GPUImage/Source/GPUImageOutput.h
@@ -0,0 +1,143 @@
+#import "GPUImageContext.h"
+
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#else
+// For now, just redefine this on the Mac
+typedef NS_ENUM(NSInteger, UIImageOrientation) {
+    UIImageOrientationUp,            // default orientation
+    UIImageOrientationDown,          // 180 deg rotation
+    UIImageOrientationLeft,          // 90 deg CCW
+    UIImageOrientationRight,         // 90 deg CW
+    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip
+    UIImageOrientationDownMirrored,  // horizontal flip
+    UIImageOrientationLeftMirrored,  // vertical flip
+    UIImageOrientationRightMirrored, // vertical flip
+};
+#endif
+
+typedef struct GPUTextureOptions {
+    GLenum minFilter;
+    GLenum magFilter;
+    GLenum wrapS;
+    GLenum wrapT;
+    GLenum internalFormat;
+    GLenum format;
+    GLenum type;
+} GPUTextureOptions;
+
+void runOnMainQueueWithoutDeadlocking(void (^block)(void));
+void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
+void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
+void reportAvailableMemoryForGPUImage(NSString *tag);
+
+@class GPUImageMovieWriter;
+
+/** GPUImage's base source object
+ 
+ Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
+ 
+ - GPUImageVideoCamera (for live video from an iOS camera)
+ - GPUImageStillCamera (for taking photos with the camera)
+ - GPUImagePicture (for still images)
+ - GPUImageMovie (for movies)
+ 
+ Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
+ */
+@interface GPUImageOutput : NSObject
+{
+    NSMutableArray *targets, *targetTextureIndices;
+
+    GLuint outputTexture;
+    CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;
+
+    BOOL overrideInputSize;
+
+    BOOL processingLargeImage;
+    NSUInteger outputTextureRetainCount;
+
+    __unsafe_unretained id<GPUImageTextureDelegate> firstTextureDelegate;
+    BOOL shouldConserveMemoryForNextFrame;
+
+    BOOL allTargetsWantMonochromeData;
+}
+
+@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
+@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
+@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
+@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
+@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
+@property(nonatomic) BOOL enabled;
+@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;
+
+/// @name Managing targets
+- (void)setInputTextureForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
+- (GLuint)textureForOutput;
+- (void)notifyTargetsAboutNewOutputTexture;
+
+/** Returns an array of the current targets.
+ */
+- (NSArray*)targets;
+
+/** Adds a target to receive notifications when new frames are available.
+ 
+ The target will be asked for its next available texture.
+ 
+ See [GPUImageInput newFrameReadyAtTime:]
+ 
+ @param newTarget Target to be added
+ */
+- (void)addTarget:(id<GPUImageInput>)newTarget;
+
+/** Adds a target to receive notifications when new frames are available.
+ 
+ See [GPUImageInput newFrameReadyAtTime:]
+ 
+ @param newTarget Target to be added
+ */
+- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
+
+/** Removes a target. The target will no longer receive notifications when new frames are available.
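+ 
+     A typical attach/detach cycle around addTarget:/removeTarget: (a sketch;
+     "movieSource" and "filter" are hypothetical names):
+ 
+         [movieSource addTarget:filter];
+         // ... process frames ...
+         [movieSource removeTarget:filter];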
+ 
+ @param targetToRemove Target to be removed
+ */
+- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
+
+/** Removes all targets.
+ */
+- (void)removeAllTargets;
+
+/// @name Manage the output texture
+
+- (void)initializeOutputTextureIfNeeded;
+- (void)deleteOutputTexture;
+- (void)forceProcessingAtSize:(CGSize)frameSize;
+- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
+- (void)cleanupOutputImage;
+
+/// @name Still image processing
+
+- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
+- (CGImageRef)newCGImageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation;
+- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
+- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter orientation:(UIImageOrientation)orientation;
+
+// Platform-specific image output methods
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
+- (UIImage *)imageFromCurrentlyProcessedOutput;
+- (UIImage *)imageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation;
+- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
+- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
+#else
+- (NSImage *)imageFromCurrentlyProcessedOutput;
+- (NSImage *)imageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation;
+- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
+- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
+#endif
+
+- (BOOL)providesMonochromeOutput;
+
+- (void)prepareForImageCapture;
+- (void)conserveMemoryForNextFrame;
+
+@end
diff --git a/GPUImage/Source/GPUImageOutput.m b/GPUImage/Source/GPUImageOutput.m
new file mode 100755
index 0000000..06cba87
--- /dev/null
+++ b/GPUImage/Source/GPUImageOutput.m
@@ -0,0 +1,463 @@
+#import "GPUImageOutput.h"
+#import "GPUImageMovieWriter.h"
+#import "GPUImagePicture.h"
+#import <mach/mach.h>
+
+void runOnMainQueueWithoutDeadlocking(void (^block)(void))
+{
+    if ([NSThread isMainThread])
+    {
+        block();
+    }
+    else
+    {
+        dispatch_sync(dispatch_get_main_queue(), block);
+    }
+}
+
+void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
+{
+    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
+#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0))
+    if (dispatch_get_current_queue() == videoProcessingQueue)
+#else
+    if (dispatch_get_specific([GPUImageContext contextKey]))
+#endif
+    {
+        block();
+    }
+    else
+    {
+        dispatch_sync(videoProcessingQueue, block);
+    }
+}
+
+void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
+{
+    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
+
+#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0))
+    if (dispatch_get_current_queue() == videoProcessingQueue)
+#else
+    if (dispatch_get_specific([GPUImageContext contextKey]))
+#endif
+    {
+        block();
+    }
+    else
+    {
+        dispatch_async(videoProcessingQueue, block);
+    }
+}
+
+void reportAvailableMemoryForGPUImage(NSString *tag)
+{
+    if (!tag)
+        tag = @"Default";
+
+    struct task_basic_info info;
+    mach_msg_type_number_t size = sizeof(info);
+    kern_return_t kerr = task_info(mach_task_self(),
+                                   TASK_BASIC_INFO,
+                                   (task_info_t)&info,
+                                   &size);
+    if( kerr == KERN_SUCCESS ) {
+        NSLog(@"%@ - Memory used: %u", tag, (unsigned int)info.resident_size); // in bytes
+    } else {
+        NSLog(@"%@ - Error: %s", tag, mach_error_string(kerr));
+    }
+}
+
+@implementation GPUImageOutput
+
+@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
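+// Note on the queue helpers defined above: dispatch_sync onto the queue you are
+// already running on deadlocks, so runSynchronouslyOnVideoProcessingQueue first
+// tests for queue identity (dispatch_get_specific on iOS 6 and later,
+// dispatch_get_current_queue before that) and runs the block inline when the
+// caller is already on the video processing queue. That check is what makes
+// nested calls like the following safe (a sketch):
+//
+//     runSynchronouslyOnVideoProcessingQueue(^{
+//         runSynchronouslyOnVideoProcessingQueue(^{
+//             // Runs inline instead of deadlocking.
+//         });
+//     });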
+@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget; +@synthesize audioEncodingTarget = _audioEncodingTarget; +@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates; +@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock; +@synthesize enabled = _enabled; +@synthesize outputTextureOptions = _outputTextureOptions; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + targets = [[NSMutableArray alloc] init]; + targetTextureIndices = [[NSMutableArray alloc] init]; + _enabled = YES; + allTargetsWantMonochromeData = YES; + + // set default texture options + _outputTextureOptions.minFilter = GL_LINEAR; + _outputTextureOptions.magFilter = GL_LINEAR; + _outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE; + _outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE; + _outputTextureOptions.internalFormat = GL_RGBA; + _outputTextureOptions.format = GL_BGRA; + _outputTextureOptions.type = GL_UNSIGNED_BYTE; + + return self; +} + +- (void)dealloc +{ + [self removeAllTargets]; + [self deleteOutputTexture]; +} + +#pragma mark - +#pragma mark Managing targets + +- (void)setInputTextureForTarget:(id)target atIndex:(NSInteger)inputTextureIndex; +{ + [target setInputTexture:[self textureForOutput] atIndex:inputTextureIndex]; +} + +- (GLuint)textureForOutput; +{ + return outputTexture; +} + +- (void)notifyTargetsAboutNewOutputTexture; +{ + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [self setInputTextureForTarget:currentTarget atIndex:textureIndex]; + } +} + +- (NSArray*)targets; +{ + return [NSArray arrayWithArray:targets]; +} + +- (void)addTarget:(id)newTarget; +{ + NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex]; + [self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex]; + + if ([newTarget shouldIgnoreUpdatesToThisTarget]) + { + _targetToIgnoreForUpdates = newTarget; + } +} + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + if([targets containsObject:newTarget]) + { + return; + } + + cachedMaximumOutputSize = CGSizeZero; + runSynchronouslyOnVideoProcessingQueue(^{ + [self setInputTextureForTarget:newTarget atIndex:textureLocation]; + [newTarget setTextureDelegate:self atIndex:textureLocation]; + [targets addObject:newTarget]; + [targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]]; + + allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput]; + }); +} + +- (void)removeTarget:(id)targetToRemove; +{ + if(![targets containsObject:targetToRemove]) + { + return; + } + + if (_targetToIgnoreForUpdates == targetToRemove) + { + _targetToIgnoreForUpdates = nil; + } + + cachedMaximumOutputSize = CGSizeZero; + + NSInteger indexOfObject = [targets indexOfObject:targetToRemove]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + runSynchronouslyOnVideoProcessingQueue(^{ + [targetToRemove setInputTexture:0 atIndex:textureIndexOfTarget]; + [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget]; + [targetToRemove setTextureDelegate:nil atIndex:textureIndexOfTarget]; + [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget]; + + [targetTextureIndices removeObjectAtIndex:indexOfObject]; + [targets 
removeObject:targetToRemove]; + [targetToRemove endProcessing]; + }); +} + +- (void)removeAllTargets; +{ + cachedMaximumOutputSize = CGSizeZero; + runSynchronouslyOnVideoProcessingQueue(^{ + for (id targetToRemove in targets) + { + NSInteger indexOfObject = [targets indexOfObject:targetToRemove]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [targetToRemove setInputTexture:0 atIndex:textureIndexOfTarget]; + [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget]; + [targetToRemove setTextureDelegate:nil atIndex:textureIndexOfTarget]; + [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget]; + } + [targets removeAllObjects]; + [targetTextureIndices removeAllObjects]; + + allTargetsWantMonochromeData = YES; + }); +} + +#pragma mark - +#pragma mark Manage the output texture + +- (void)initializeOutputTextureIfNeeded; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + if (!outputTexture) + { + [GPUImageContext useImageProcessingContext]; + + glActiveTexture(GL_TEXTURE0); + glGenTextures(1, &outputTexture); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.outputTextureOptions.minFilter); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.outputTextureOptions.magFilter); + // This is necessary for non-power-of-two textures + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + glBindTexture(GL_TEXTURE_2D, 0); + } + }); +} + +- (void)deleteOutputTexture; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + }); +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + +} + +- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize; +{ +} + +- (void)cleanupOutputImage; +{ + NSLog(@"WARNING: Undefined image cleanup"); +} + +#pragma mark - +#pragma mark Still image processing + +- (CGImageRef)newCGImageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation; +{ + return nil; +} + +- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter +{ + return [self newCGImageByFilteringCGImage:imageToFilter orientation:UIImageOrientationUp]; +} + +- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter orientation:(UIImageOrientation)orientation; +{ + GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter]; + + [stillImageSource addTarget:(id)self]; + [stillImageSource processImage]; + + CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutputWithOrientation:orientation]; + + [stillImageSource removeTarget:(id)self]; + return processedImage; +} + +- (BOOL)providesMonochromeOutput; +{ + return NO; +} + +- (void)prepareForImageCapture; +{ + +} + +- (void)conserveMemoryForNextFrame; +{ + shouldConserveMemoryForNextFrame = YES; + + for (id currentTarget in targets) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + [currentTarget conserveMemoryForNextFrame]; + } + } +} + +#pragma mark - +#pragma mark Platform-specific image output methods + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + +- (CGImageRef)newCGImageFromCurrentlyProcessedOutput; +{ + UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation]; + UIImageOrientation imageOrientation = 
UIImageOrientationLeft; + switch (deviceOrientation) + { + case UIDeviceOrientationPortrait: + imageOrientation = UIImageOrientationUp; + break; + case UIDeviceOrientationPortraitUpsideDown: + imageOrientation = UIImageOrientationDown; + break; + case UIDeviceOrientationLandscapeLeft: + imageOrientation = UIImageOrientationLeft; + break; + case UIDeviceOrientationLandscapeRight: + imageOrientation = UIImageOrientationRight; + break; + default: + imageOrientation = UIImageOrientationUp; + break; + } + + return [self newCGImageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; +} + +- (UIImage *)imageFromCurrentlyProcessedOutput; +{ + UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation]; + UIImageOrientation imageOrientation = UIImageOrientationLeft; + switch (deviceOrientation) + { + case UIDeviceOrientationPortrait: + imageOrientation = UIImageOrientationUp; + break; + case UIDeviceOrientationPortraitUpsideDown: + imageOrientation = UIImageOrientationDown; + break; + case UIDeviceOrientationLandscapeLeft: + imageOrientation = UIImageOrientationLeft; + break; + case UIDeviceOrientationLandscapeRight: + imageOrientation = UIImageOrientationRight; + break; + default: + imageOrientation = UIImageOrientationUp; + break; + } + + return [self imageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; +} + +- (UIImage *)imageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation; +{ + CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; + UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation]; + CGImageRelease(cgImageFromBytes); + + return finalImage; +} + +- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter; +{ + CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImage] orientation:[imageToFilter imageOrientation]]; + UIImage *processedImage = [UIImage imageWithCGImage:image scale:[imageToFilter scale] orientation:[imageToFilter imageOrientation]]; + CGImageRelease(image); + return processedImage; +} + +- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter +{ + return [self newCGImageByFilteringCGImage:[imageToFilter CGImage] orientation:[imageToFilter imageOrientation]]; +} + +#else + +- (CGImageRef)newCGImageFromCurrentlyProcessedOutput; +{ + return [self newCGImageFromCurrentlyProcessedOutputWithOrientation:UIImageOrientationLeft]; +} + +- (NSImage *)imageFromCurrentlyProcessedOutput; +{ + return [self imageFromCurrentlyProcessedOutputWithOrientation:UIImageOrientationLeft]; +} + +- (NSImage *)imageFromCurrentlyProcessedOutputWithOrientation:(UIImageOrientation)imageOrientation; +{ + CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutputWithOrientation:imageOrientation]; + NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize]; + CGImageRelease(cgImageFromBytes); + + return finalImage; +} + +- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter; +{ + CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil] orientation:UIImageOrientationLeft]; + NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize]; + CGImageRelease(image); + return processedImage; +} + +- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter +{ + return [self newCGImageByFilteringCGImage:[imageToFilter 
CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil] orientation:UIImageOrientationLeft]; +} + +#endif + +#pragma mark - +#pragma mark GPUImageTextureDelegate methods + +- (void)textureNoLongerNeededForTarget:(id)textureTarget; +{ + outputTextureRetainCount--; + if (outputTextureRetainCount < 1) + { + [self cleanupOutputImage]; + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue; +{ + _audioEncodingTarget = newValue; + if( ! _audioEncodingTarget.hasAudioTrack ) + { + _audioEncodingTarget.hasAudioTrack = YES; + } +} + +@end diff --git a/GPUImage/Source/GPUImageOverlayBlendFilter.h b/GPUImage/Source/GPUImageOverlayBlendFilter.h new file mode 100755 index 0000000..57eb840 --- /dev/null +++ b/GPUImage/Source/GPUImageOverlayBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageOverlayBlendFilter.m b/GPUImage/Source/GPUImageOverlayBlendFilter.m new file mode 100755 index 0000000..c8c5185 --- /dev/null +++ b/GPUImage/Source/GPUImageOverlayBlendFilter.m @@ -0,0 +1,94 @@ +#import "GPUImageOverlayBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 base = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + mediump float ra; + if (2.0 * base.r < base.a) { + ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } + + mediump float ga; + if (2.0 * base.g < base.a) { + ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } + + mediump float ba; + if (2.0 * base.b < base.a) { + ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } + + gl_FragColor = vec4(ra, ga, ba, 1.0); + } +); +#else +NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + float ra; + if (2.0 * base.r < base.a) { + ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } else { + ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a); + } + + float ga; + if (2.0 * base.g < base.a) { + ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a); + } else { + ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - 
base.a) + base.g * (1.0 - overlay.a); + } + + float ba; + if (2.0 * base.b < base.a) { + ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } else { + ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a); + } + + gl_FragColor = vec4(ra, ga, ba, 1.0); + } +); +#endif + +@implementation GPUImageOverlayBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageOverlayBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.h b/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.h new file mode 100644 index 0000000..aa8f3f4 --- /dev/null +++ b/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.h @@ -0,0 +1,16 @@ +#import "GPUImageFilter.h" + +// This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene. +// +// It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications: +// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7. +// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), pp. 1489-1494. + +@interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter +{ + GLubyte *rawImagePixels; + GLfloat *lineCoordinates; + unsigned int maxLinePairsToRender, linePairsToRender; +} + +@end diff --git a/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.m b/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.m new file mode 100644 index 0000000..a50df8c --- /dev/null +++ b/GPUImage/Source/GPUImageParallelCoordinateLineTransformFilter.m @@ -0,0 +1,252 @@ +#import "GPUImageParallelCoordinateLineTransformFilter.h" + +NSString *const kGPUImageHoughAccumulationVertexShaderString = SHADER_STRING +( + attribute vec4 position; + + void main() + { + gl_Position = position; + } +); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING +( + const lowp float scalingFactor = 1.0 / 256.0; + + void main() + { + gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0); + } +); + +// highp - floating point range: -2^62 to 2^62 with 2^-16 relative precision, integer range: -2^16 to 2^16 (OpenGL ES 2.0 minimums) + // NOTE: See below for where I'm tacking on the required extension as a prefix + NSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING +( +// const lowp float scalingFactor = 0.004; + const lowp float scalingFactor = 0.1; + + void main() + { + mediump vec4 fragmentData = gl_LastFragData[0]; + + fragmentData.r = fragmentData.r + scalingFactor; + fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g; + fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b; + fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a; + + fragmentData = fract(fragmentData); + + gl_FragColor = vec4(fragmentData.rgb, 1.0); + } +); + +#else +NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING +( + const float scalingFactor = 1.0 / 256.0; + + void main() + { + gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0); + } +); + +NSString *const
kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING +( + const float scalingFactor = 1.0 / 256.0; + + void main() + { + // gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0); + gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0); + } +); +#endif + +@interface GPUImageParallelCoordinateLineTransformFilter() +// Rendering +- (void)generateLineCoordinates; + +@end + +@implementation GPUImageParallelCoordinateLineTransformFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + NSString *fragmentShaderToUse = nil; + + if ([GPUImageContext deviceSupportsFramebufferReads]) + { + fragmentShaderToUse = [NSString stringWithFormat:@"#extension GL_EXT_shader_framebuffer_fetch : require\n %@",kGPUImageHoughAccumulationFBOReadFragmentShaderString]; + } + else + { + fragmentShaderToUse = kGPUImageHoughAccumulationFragmentShaderString; + } + + if (!(self = [super initWithVertexShaderFromString:kGPUImageHoughAccumulationVertexShaderString fragmentShaderFromString:fragmentShaderToUse])) + { + return nil; + } + + + return self; +} + +// TODO: have this be regenerated on change of image size +- (void)dealloc; +{ + free(rawImagePixels); + free(lineCoordinates); +} + +- (void)initializeAttributes; +{ + [filterProgram addAttribute:@"position"]; +} + +#pragma mark - +#pragma mark Rendering + +#define MAXLINESCALINGFACTOR 4 + +- (void)generateLineCoordinates; +{ + unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4; + rawImagePixels = (GLubyte *)malloc(imageByteSize); + + maxLinePairsToRender = (inputTextureSize.width * inputTextureSize.height) / MAXLINESCALINGFACTOR; + lineCoordinates = calloc(maxLinePairsToRender * 8, sizeof(GLfloat)); +} + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + if (lineCoordinates == NULL) + { + [self generateLineCoordinates]; + } + + [self renderToTextureWithVertices:NULL textureCoordinates:NULL sourceTexture:filterSourceTexture]; + + [self informTargetsAboutNewFrameAtTime:frameTime]; +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // we need a normal color texture for this filter + NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA."); + NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE."); + + if (self.preventRendering) + { + return; + } + + [GPUImageContext useImageProcessingContext]; + + // Grab the edge points from the previous frame and create the parallel coordinate lines for them + // This would be a great place to have a working histogram pyramid implementation + glFinish(); + glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels); + + CGFloat xAspectMultiplier = 1.0, yAspectMultiplier = 1.0; + +// if (inputTextureSize.width > inputTextureSize.height) +// { +// yAspectMultiplier = inputTextureSize.height / inputTextureSize.width; +// } +// else +// { +// xAspectMultiplier = inputTextureSize.width / inputTextureSize.height; +// } + +// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + + unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4; + unsigned int imageWidth = inputTextureSize.width * 4; + + linePairsToRender = 0; + unsigned int 
currentByte = 0; + unsigned int lineStorageIndex = 0; + unsigned int maxLineStorageIndex = maxLinePairsToRender * 8 - 8; + + GLfloat minY = 100, maxY = -100, minX = 100, maxX = -100; + while (currentByte < imageByteSize) + { + GLubyte colorByte = rawImagePixels[currentByte]; + + if (colorByte > 0) + { + unsigned int xCoordinate = currentByte % imageWidth; + unsigned int yCoordinate = currentByte / imageWidth; + + CGFloat normalizedXCoordinate = (-1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / inputTextureSize.width) * xAspectMultiplier; + CGFloat normalizedYCoordinate = (-1.0 + 2.0 * (CGFloat)(yCoordinate) / inputTextureSize.height) * yAspectMultiplier; + + minY = MIN(minY, normalizedYCoordinate); + maxY = MAX(maxY, normalizedYCoordinate); + minX = MIN(minX, normalizedXCoordinate); + maxX = MAX(maxX, normalizedXCoordinate); + +// NSLog(@"Parallel line coordinates: (%f, %f) - (%f, %f) - (%f, %f)", -1.0, -normalizedYCoordinate, 0.0, normalizedXCoordinate, 1.0, normalizedYCoordinate); + // T space coordinates, (-d, -y) to (0, x) + lineCoordinates[lineStorageIndex++] = -1.0; + lineCoordinates[lineStorageIndex++] = -normalizedYCoordinate; + lineCoordinates[lineStorageIndex++] = 0.0; + lineCoordinates[lineStorageIndex++] = normalizedXCoordinate; + + // S space coordinates, (0, x) to (d, y) + lineCoordinates[lineStorageIndex++] = 0.0; + lineCoordinates[lineStorageIndex++] = normalizedXCoordinate; + lineCoordinates[lineStorageIndex++] = 1.0; + lineCoordinates[lineStorageIndex++] = normalizedYCoordinate; + + linePairsToRender++; + + linePairsToRender = MIN(linePairsToRender, maxLinePairsToRender); + lineStorageIndex = MIN(lineStorageIndex, maxLineStorageIndex); + } + currentByte +=8; + } + +// NSLog(@"Line pairs to render: %d out of max: %d", linePairsToRender, maxLinePairsToRender); + +// CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); +// NSLog(@"Line generation processing time : %f ms", 1000.0 * currentFrameTime); + + [self setFilterFBO]; + + [GPUImageContext setActiveShaderProgram:filterProgram]; + + glClearColor(0.0, 0.0, 0.0, 1.0); + glClear(GL_COLOR_BUFFER_BIT); + + if (![GPUImageContext deviceSupportsFramebufferReads]) + { + glBlendEquation(GL_FUNC_ADD); + glBlendFunc(GL_ONE, GL_ONE); + glEnable(GL_BLEND); + } + else + { + glLineWidth(1); + } + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates); + glDrawArrays(GL_LINES, 0, (linePairsToRender * 4)); + + if (![GPUImageContext deviceSupportsFramebufferReads]) + { + glDisable(GL_BLEND); + } +} + +@end diff --git a/GPUImage/Source/GPUImagePerlinNoiseFilter.h b/GPUImage/Source/GPUImagePerlinNoiseFilter.h new file mode 100644 index 0000000..922f4d3 --- /dev/null +++ b/GPUImage/Source/GPUImagePerlinNoiseFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +@interface GPUImagePerlinNoiseFilter : GPUImageFilter +{ + GLint scaleUniform, colorStartUniform, colorFinishUniform; +} + +@property (readwrite, nonatomic) GPUVector4 colorStart; +@property (readwrite, nonatomic) GPUVector4 colorFinish; + +@property (readwrite, nonatomic) float scale; + +@end diff --git a/GPUImage/Source/GPUImagePerlinNoiseFilter.m b/GPUImage/Source/GPUImagePerlinNoiseFilter.m new file mode 100644 index 0000000..9ca7cba --- /dev/null +++ b/GPUImage/Source/GPUImagePerlinNoiseFilter.m @@ -0,0 +1,239 @@ +#import "GPUImagePerlinNoiseFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING +( + precision highp float; + varying highp vec2 
textureCoordinate; + uniform float scale; + + uniform vec4 colorStart; + uniform vec4 colorFinish; + + // + // Description : Array and textureless GLSL 2D/3D/4D simplex + // noise functions. + // Author : Ian McEwan, Ashima Arts. + // Maintainer : ijm + // Lastmod : 20110822 (ijm) + // License : Copyright (C) 2011 Ashima Arts. All rights reserved. + // Distributed under the MIT License. See LICENSE file. + // https://github.com/ashima/webgl-noise + // + + vec4 mod289(vec4 x) +{ + return x - floor(x * (1.0 / 289.0)) * 289.0; +} + + vec4 permute(vec4 x) +{ + return mod289(((x*34.0)+1.0)*x); +} + + vec4 taylorInvSqrt(vec4 r) +{ + return 1.79284291400159 - 0.85373472095314 * r; +} + + vec2 fade(vec2 t) { + return t*t*t*(t*(t*6.0-15.0)+10.0); + } + + // Classic Perlin noise + float cnoise(vec2 P) +{ + vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0); + vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0); + Pi = mod289(Pi); // To avoid truncation effects in permutation + vec4 ix = Pi.xzxz; + vec4 iy = Pi.yyww; + vec4 fx = Pf.xzxz; + vec4 fy = Pf.yyww; + + vec4 i = permute(permute(ix) + iy); + + vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ; + vec4 gy = abs(gx) - 0.5 ; + vec4 tx = floor(gx + 0.5); + gx = gx - tx; + + vec2 g00 = vec2(gx.x,gy.x); + vec2 g10 = vec2(gx.y,gy.y); + vec2 g01 = vec2(gx.z,gy.z); + vec2 g11 = vec2(gx.w,gy.w); + + vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); + g00 *= norm.x; + g01 *= norm.y; + g10 *= norm.z; + g11 *= norm.w; + + float n00 = dot(g00, vec2(fx.x, fy.x)); + float n10 = dot(g10, vec2(fx.y, fy.y)); + float n01 = dot(g01, vec2(fx.z, fy.z)); + float n11 = dot(g11, vec2(fx.w, fy.w)); + + vec2 fade_xy = fade(Pf.xy); + vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x); + float n_xy = mix(n_x.x, n_x.y, fade_xy.y); + return 2.3 * n_xy; +} + + + void main() + { + + float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0; + + vec4 colorDiff = colorFinish - colorStart; + vec4 color = colorStart + colorDiff * n1; + + gl_FragColor = color; + } +); +#else +NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + uniform float scale; + + uniform vec4 colorStart; + uniform vec4 colorFinish; + + // + // Description : Array and textureless GLSL 2D/3D/4D simplex + // noise functions. + // Author : Ian McEwan, Ashima Arts. + // Maintainer : ijm + // Lastmod : 20110822 (ijm) + // License : Copyright (C) 2011 Ashima Arts. All rights reserved. + // Distributed under the MIT License. See LICENSE file. 
+ // https://github.com/ashima/webgl-noise + // + + vec4 mod289(vec4 x) +{ + return x - floor(x * (1.0 / 289.0)) * 289.0; +} + + vec4 permute(vec4 x) +{ + return mod289(((x*34.0)+1.0)*x); +} + + vec4 taylorInvSqrt(vec4 r) +{ + return 1.79284291400159 - 0.85373472095314 * r; +} + + vec2 fade(vec2 t) { + return t*t*t*(t*(t*6.0-15.0)+10.0); + } + + // Classic Perlin noise + float cnoise(vec2 P) +{ + vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0); + vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0); + Pi = mod289(Pi); // To avoid truncation effects in permutation + vec4 ix = Pi.xzxz; + vec4 iy = Pi.yyww; + vec4 fx = Pf.xzxz; + vec4 fy = Pf.yyww; + + vec4 i = permute(permute(ix) + iy); + + vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ; + vec4 gy = abs(gx) - 0.5 ; + vec4 tx = floor(gx + 0.5); + gx = gx - tx; + + vec2 g00 = vec2(gx.x,gy.x); + vec2 g10 = vec2(gx.y,gy.y); + vec2 g01 = vec2(gx.z,gy.z); + vec2 g11 = vec2(gx.w,gy.w); + + vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); + g00 *= norm.x; + g01 *= norm.y; + g10 *= norm.z; + g11 *= norm.w; + + float n00 = dot(g00, vec2(fx.x, fy.x)); + float n10 = dot(g10, vec2(fx.y, fy.y)); + float n01 = dot(g01, vec2(fx.z, fy.z)); + float n11 = dot(g11, vec2(fx.w, fy.w)); + + vec2 fade_xy = fade(Pf.xy); + vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x); + float n_xy = mix(n_x.x, n_x.y, fade_xy.y); + return 2.3 * n_xy; + } + + void main() + { + + float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0; + + vec4 colorDiff = colorFinish - colorStart; + vec4 color = colorStart + colorDiff * n1; + + gl_FragColor = color; + } +); +#endif + + +@implementation GPUImagePerlinNoiseFilter + +@synthesize scale = _scale, colorStart = _colorStart, colorFinish = _colorFinish; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePerlinNoiseFragmentShaderString])) + { + return nil; + } + + scaleUniform = [filterProgram uniformIndex:@"scale"]; + + colorStartUniform = [filterProgram uniformIndex:@"colorStart"]; + colorFinishUniform = [filterProgram uniformIndex:@"colorFinish"]; + + [self setScale:8.0]; + + [self setColorStart:(GPUVector4){0.0, 0.0, 0.0, 1.0}]; + [self setColorFinish:(GPUVector4){1.0, 1.0, 1.0, 1.0}]; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setScale:(float)scale +{ + _scale = scale; + + [self setFloat:_scale forUniform:scaleUniform program:filterProgram]; +} + +- (void)setColorStart:(GPUVector4)colorStart +{ + _colorStart = colorStart; + + [self setVec4:_colorStart forUniform:colorStartUniform program:filterProgram]; +} + +- (void)setColorFinish:(GPUVector4)colorFinish +{ + _colorFinish = colorFinish; + + [self setVec4:_colorFinish forUniform:colorFinishUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePinchDistortionFilter.h b/GPUImage/Source/GPUImagePinchDistortionFilter.h new file mode 100755 index 0000000..994774f --- /dev/null +++ b/GPUImage/Source/GPUImagePinchDistortionFilter.h @@ -0,0 +1,20 @@ +#import "GPUImageFilter.h" + +/** Creates a pinch distortion of the image + */ +@interface GPUImagePinchDistortionFilter : GPUImageFilter +{ + GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform; +} + +/** The center about which to apply the distortion, with a default of (0.5, 0.5) + */ +@property(readwrite, nonatomic) CGPoint center; +/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0 + */ 
+@property(readwrite, nonatomic) CGFloat radius; +/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5 + */ +@property(readwrite, nonatomic) CGFloat scale; + +@end diff --git a/GPUImage/Source/GPUImagePinchDistortionFilter.m b/GPUImage/Source/GPUImagePinchDistortionFilter.m new file mode 100755 index 0000000..76d7909 --- /dev/null +++ b/GPUImage/Source/GPUImagePinchDistortionFilter.m @@ -0,0 +1,176 @@ +#import "GPUImagePinchDistortionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float aspectRatio; + uniform highp vec2 center; + uniform highp float radius; + uniform highp float scale; + + void main() + { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float dist = distance(center, textureCoordinateToUse); + textureCoordinateToUse = textureCoordinate; + + if (dist < radius) + { + textureCoordinateToUse -= center; + highp float percent = 1.0 + ((0.5 - dist) / 0.5) * scale; + textureCoordinateToUse = textureCoordinateToUse * percent; + textureCoordinateToUse += center; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate ); + } + } +); +#else +NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float aspectRatio; + uniform vec2 center; + uniform float radius; + uniform float scale; + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float dist = distance(center, textureCoordinateToUse); + textureCoordinateToUse = textureCoordinate; + + if (dist < radius) + { + textureCoordinateToUse -= center; + float percent = 1.0 + ((0.5 - dist) / 0.5) * scale; + textureCoordinateToUse = textureCoordinateToUse * percent; + textureCoordinateToUse += center; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate ); + } + } +); +#endif + +@interface GPUImagePinchDistortionFilter () + +- (void)adjustAspectRatio; + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end + +@implementation GPUImagePinchDistortionFilter + +@synthesize aspectRatio = _aspectRatio; +@synthesize center = _center; +@synthesize radius = _radius; +@synthesize scale = _scale; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePinchDistortionFragmentShaderString])) + { + return nil; + } + + aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"]; + radiusUniform = [filterProgram uniformIndex:@"radius"]; + scaleUniform = [filterProgram uniformIndex:@"scale"]; + centerUniform = [filterProgram uniformIndex:@"center"]; + + self.radius = 1.0; + self.scale = 0.5; + self.center = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- 
(void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; + [self adjustAspectRatio]; +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram]; +} + +- (void)setRadius:(CGFloat)newValue; +{ + _radius = newValue; + + [self setFloat:_radius forUniform:radiusUniform program:filterProgram]; +} + +- (void)setScale:(CGFloat)newValue; +{ + _scale = newValue; + + [self setFloat:_scale forUniform:scaleUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePixellateFilter.h b/GPUImage/Source/GPUImagePixellateFilter.h new file mode 100755 index 0000000..d0f6ae0 --- /dev/null +++ b/GPUImage/Source/GPUImagePixellateFilter.h @@ -0,0 +1,12 @@ +#import "GPUImageFilter.h" + +@interface GPUImagePixellateFilter : GPUImageFilter +{ + GLint fractionalWidthOfAPixelUniform, aspectRatioUniform; +} + +// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored. 
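+// For example (illustrative arithmetic, not from the original header): fractionalWidthOfAPixel = 0.05 on a 1080-pixel-wide input produces blocks roughly 0.05 * 1080 = 54 pixels across, since the shader snaps each texture coordinate to the center of a cell this fraction wide.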
+@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel; + + +@end diff --git a/GPUImage/Source/GPUImagePixellateFilter.m b/GPUImage/Source/GPUImagePixellateFilter.m new file mode 100755 index 0000000..88430d0 --- /dev/null +++ b/GPUImage/Source/GPUImagePixellateFilter.m @@ -0,0 +1,151 @@ +#import "GPUImagePixellateFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float fractionalWidthOfPixel; + uniform highp float aspectRatio; + + void main() + { + highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + gl_FragColor = texture2D(inputImageTexture, samplePos ); + } +); +#else +NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float fractionalWidthOfPixel; + uniform float aspectRatio; + + void main() + { + vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + gl_FragColor = texture2D(inputImageTexture, samplePos ); + } +); +#endif + +@interface GPUImagePixellateFilter () + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +- (void)adjustAspectRatio; + +@end + +@implementation GPUImagePixellateFilter + +@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel; +@synthesize aspectRatio = _aspectRatio; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"]; + aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"]; + + self.fractionalWidthOfAPixel = 0.05; + + return self; +} + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self adjustAspectRatio]; +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setFractionalWidthOfAPixel:(CGFloat)newValue; +{ + CGFloat singlePixelSpacing; + if (inputTextureSize.width != 0.0) + { + singlePixelSpacing = 1.0 / inputTextureSize.width; + } + else + { + singlePixelSpacing = 1.0 / 2048.0; + } + + if 
(newValue < singlePixelSpacing) + { + _fractionalWidthOfAPixel = singlePixelSpacing; + } + else + { + _fractionalWidthOfAPixel = newValue; + } + + [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram]; +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePixellatePositionFilter.h b/GPUImage/Source/GPUImagePixellatePositionFilter.h new file mode 100755 index 0000000..9d304c9 --- /dev/null +++ b/GPUImage/Source/GPUImagePixellatePositionFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilter.h" + +@interface GPUImagePixellatePositionFilter : GPUImageFilter +{ + GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform; +} + +// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored. +@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel; + +// the center point to start pixelation in texture coordinates, default 0.5, 0.5 +@property(readwrite, nonatomic) CGPoint center; + +// the radius (0.0 - 1.0) in which to pixelate, default 1.0 +@property(readwrite, nonatomic) CGFloat radius; + +@end diff --git a/GPUImage/Source/GPUImagePixellatePositionFilter.m b/GPUImage/Source/GPUImagePixellatePositionFilter.m new file mode 100755 index 0000000..f1bd09c --- /dev/null +++ b/GPUImage/Source/GPUImagePixellatePositionFilter.m @@ -0,0 +1,194 @@ +#import "GPUImagePixellatePositionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float fractionalWidthOfPixel; + uniform highp float aspectRatio; + uniform lowp vec2 pixelateCenter; + uniform highp float pixelateRadius; + + void main() + { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float dist = distance(pixelateCenter, textureCoordinateToUse); + + if (dist < pixelateRadius) + { + highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + gl_FragColor = texture2D(inputImageTexture, samplePos ); + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate ); + } + } +); +#else +NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float fractionalWidthOfPixel; + uniform float aspectRatio; + uniform vec2 pixelateCenter; + uniform float pixelateRadius; + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float dist = distance(pixelateCenter, textureCoordinateToUse); + + if (dist < pixelateRadius) + { + vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + gl_FragColor = texture2D(inputImageTexture, samplePos ); + } + else + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate ); + } + } +); +#endif + +@interface GPUImagePixellatePositionFilter 
() + +- (void)adjustAspectRatio; + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end + +@implementation GPUImagePixellatePositionFilter + +@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel; +@synthesize aspectRatio = _aspectRatio; +@synthesize center = _center; +@synthesize radius = _radius; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationPositionFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"]; + aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"]; + centerUniform = [filterProgram uniformIndex:@"pixelateCenter"]; + radiusUniform = [filterProgram uniformIndex:@"pixelateRadius"]; + + self.fractionalWidthOfAPixel = 0.05; + self.center = CGPointMake(0.5f, 0.5f); + self.radius = 0.25f; + + return self; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; + [self adjustAspectRatio]; +} + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setFractionalWidthOfAPixel:(CGFloat)newValue; +{ + CGFloat singlePixelSpacing; + if (inputTextureSize.width != 0.0) + { + singlePixelSpacing = 1.0 / inputTextureSize.width; + } + else + { + singlePixelSpacing = 1.0 / 2048.0; + } + + if (newValue < singlePixelSpacing) + { + _fractionalWidthOfAPixel = singlePixelSpacing; + } + else + { + _fractionalWidthOfAPixel = newValue; + } + + [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram]; +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)center +{ + _center = center; + CGPoint rotatedPoint = [self rotatedPoint:center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +- (void)setRadius:(CGFloat)radius +{ + _radius = radius; + + [self setFloat:_radius forUniform:radiusUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePoissonBlendFilter.h b/GPUImage/Source/GPUImagePoissonBlendFilter.h new file mode 100644 index 0000000..da3b636 --- /dev/null +++ b/GPUImage/Source/GPUImagePoissonBlendFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageTwoInputCrossTextureSamplingFilter.h" +#import "GPUImageFilterGroup.h" + +@interface GPUImagePoissonBlendFilter : 
GPUImageTwoInputCrossTextureSamplingFilter +{ + GLint mixUniform; + + GLuint secondFilterOutputTexture; + GLuint secondFilterFramebuffer; +} + +// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level +@property(readwrite, nonatomic) CGFloat mix; + +// The number of times to propagate the gradients. +// Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow. +@property(readwrite, nonatomic) NSUInteger numIterations; + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImagePoissonBlendFilter.m b/GPUImage/Source/GPUImagePoissonBlendFilter.m new file mode 100644 index 0000000..105031a --- /dev/null +++ b/GPUImage/Source/GPUImagePoissonBlendFilter.m @@ -0,0 +1,380 @@ +#import "GPUImagePoissonBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING +( + precision mediump float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + varying vec2 topTextureCoordinate; + varying vec2 bottomTextureCoordinate; + + varying vec2 textureCoordinate2; + varying vec2 leftTextureCoordinate2; + varying vec2 rightTextureCoordinate2; + varying vec2 topTextureCoordinate2; + varying vec2 bottomTextureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform lowp float mixturePercent; + + void main() + { + vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + + vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2); + vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb; + vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb; + vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb; + vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb; + + vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0; + vec3 diffColor = centerColor.rgb - meanColor; + + vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0; + vec3 diffColor2 = centerColor2.rgb - meanColor2; + + vec3 gradColor = (meanColor + diffColor2); + + gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a); + } +); +#else +NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + varying vec2 topTextureCoordinate; + varying vec2 bottomTextureCoordinate; + + varying vec2 textureCoordinate2; + varying vec2 leftTextureCoordinate2; + varying vec2 rightTextureCoordinate2; + varying vec2 topTextureCoordinate2; + varying vec2 bottomTextureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float mixturePercent; + + void main() + { + vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + vec3 rightColor = 
texture2D(inputImageTexture, rightTextureCoordinate).rgb; + vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + + vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2); + vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb; + vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb; + vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb; + vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb; + + vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0; + vec3 diffColor = centerColor.rgb - meanColor; + + vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0; + vec3 diffColor2 = centerColor2.rgb - meanColor2; + + vec3 gradColor = (meanColor + diffColor2); + + gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a); + } +); +#endif + +@implementation GPUImagePoissonBlendFilter + +@synthesize mix = _mix; +@synthesize numIterations = _numIterations; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString])) + { + return nil; + } + + mixUniform = [filterProgram uniformIndex:@"mixturePercent"]; + self.mix = 0.5; + + self.numIterations = 10; + + return self; +} + +- (void)setMix:(CGFloat)newValue; +{ + _mix = newValue; + + [self setFloat:_mix forUniform:mixUniform program:filterProgram]; +} + +- (void)initializeSecondOutputTextureIfNeeded; +{ + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + return; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (!secondFilterOutputTexture) + { + glGenTextures(1, &secondFilterOutputTexture); + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glBindTexture(GL_TEXTURE_2D, 0); + } + }); +} + +- (void)deleteOutputTexture; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + + if (!([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)) + { + if (secondFilterOutputTexture) + { + glDeleteTextures(1, &secondFilterOutputTexture); + secondFilterOutputTexture = 0; + } + } + }); +} + +- (void)createFilterFBOofSize:(CGSize)currentFBOSize +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (!filterFramebuffer) + { + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + preparedToCaptureImage = NO; + [super createFilterFBOofSize:currentFBOSize]; + preparedToCaptureImage = YES; + } + else + { + [super createFilterFBOofSize:currentFBOSize]; + } + } + + glGenFramebuffers(1, &secondFilterFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#else + CVReturn err = 
CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + CFDictionaryRef empty; // empty value for attr value. + CFMutableDictionaryRef attrs; + empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary + attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); + CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty); + + err = CVPixelBufferCreate(kCFAllocatorDefault, (int)currentFBOSize.width, (int)currentFBOSize.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget); + if (err) + { + NSLog(@"FBO size: %f, %f", currentFBOSize.width, currentFBOSize.height); + NSAssert(NO, @"Error at CVPixelBufferCreate %d", err); + } + + err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + filterTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + self.outputTextureOptions.internalFormat, // opengl format + (int)currentFBOSize.width, + (int)currentFBOSize.height, + self.outputTextureOptions.format, // native iOS format + self.outputTextureOptions.type, + 0, + &renderTexture); + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + CFRelease(attrs); + CFRelease(empty); + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); + secondFilterOutputTexture = CVOpenGLESTextureGetName(renderTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); + + [self notifyTargetsAboutNewOutputTexture]; +#endif + } + else + { + [self initializeSecondOutputTextureIfNeeded]; + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + // if ([self providesMonochromeOutput] && [GPUImageContext deviceSupportsRedTextures]) + // { + // glTexImage2D(GL_TEXTURE_2D, 0, GL_RG_EXT, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RG_EXT, GL_UNSIGNED_BYTE, 0); + // } + // else + // { + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + (int)currentFBOSize.width, + (int)currentFBOSize.height, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, 0); + // } + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, secondFilterOutputTexture, 0); + + [self notifyTargetsAboutNewOutputTexture]; + } + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + + glBindTexture(GL_TEXTURE_2D, 0); + }); +} + +- (void)recreateFilterFBO +{ + cachedMaximumOutputSize = CGSizeZero; + + [self destroyFilterFBO]; + [self deleteOutputTexture]; +} + +- (void)destroyFilterFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (filterFramebuffer) + { + glDeleteFramebuffers(1, &filterFramebuffer); + filterFramebuffer = 0; + } + + if 
(secondFilterFramebuffer) + { + glDeleteFramebuffers(1, &secondFilterFramebuffer); + secondFilterFramebuffer = 0; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + if (filterTextureCache != NULL) + { + CFRelease(renderTarget); + renderTarget = NULL; + + if (renderTexture) + { + CFRelease(renderTexture); + renderTexture = NULL; + } + + CVOpenGLESTextureCacheFlush(filterTextureCache, 0); + CFRelease(filterTextureCache); + filterTextureCache = NULL; + } +#endif + }); +} + +- (void)setSecondFilterFBO; +{ + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); +} + +- (void)setOutputFBO; +{ + if (self.numIterations % 2 == 1) { + [self setSecondFilterFBO]; + } else { + [self setFilterFBO]; + } +} + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + // Run the first stage of the two-pass filter + [GPUImageContext setActiveShaderProgram:filterProgram]; + + [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture]; + + for (int pass = 1; pass < self.numIterations; pass++) { + + if (pass % 2 == 0) { + + [GPUImageContext setActiveShaderProgram:filterProgram]; + + [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation] sourceTexture:secondFilterOutputTexture]; + } else { + // Run the second stage of the two-pass filter + [self setSecondFilterFBO]; + + [GPUImageContext setActiveShaderProgram:filterProgram]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glUniform1i(filterInputTextureUniform, 2); + + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, filterSourceTexture2); + glUniform1i(filterInputTextureUniform2, 3); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]); + glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + } + } +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImagePolarPixellateFilter.h b/GPUImage/Source/GPUImagePolarPixellateFilter.h new file mode 100755 index 0000000..3de6a4d --- /dev/null +++ b/GPUImage/Source/GPUImagePolarPixellateFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +@interface GPUImagePolarPixellateFilter : GPUImageFilter { + GLint centerUniform, pixelSizeUniform; +} + +// The center about which to apply the distortion, with a default of (0.5, 0.5) +@property(readwrite, nonatomic) CGPoint center; +// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05) +@property(readwrite, nonatomic) CGSize pixelSize; + + +@end diff --git a/GPUImage/Source/GPUImagePolarPixellateFilter.m b/GPUImage/Source/GPUImagePolarPixellateFilter.m new file mode 100755 index 0000000..5677db4 --- /dev/null +++ b/GPUImage/Source/GPUImagePolarPixellateFilter.m @@ -0,0 +1,128 @@ +#import "GPUImagePolarPixellateFilter.h" + +// @fattjake based on vid by toneburst + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING +( + varying highp 
vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 center; + uniform highp vec2 pixelSize; + + + void main() + { + highp vec2 normCoord = 2.0 * textureCoordinate - 1.0; + highp vec2 normCenter = 2.0 * center - 1.0; + + normCoord -= normCenter; + + highp float r = length(normCoord); // to polar coords + highp float phi = atan(normCoord.y, normCoord.x); // to polar coords + + r = r - mod(r, pixelSize.x) + 0.03; + phi = phi - mod(phi, pixelSize.y); + + normCoord.x = r * cos(phi); + normCoord.y = r * sin(phi); + + normCoord += normCenter; + + mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + + } +); +#else +NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform vec2 center; + uniform vec2 pixelSize; + + + void main() + { + vec2 normCoord = 2.0 * textureCoordinate - 1.0; + vec2 normCenter = 2.0 * center - 1.0; + + normCoord -= normCenter; + + float r = length(normCoord); // to polar coords + float phi = atan(normCoord.y, normCoord.x); // to polar coords + + r = r - mod(r, pixelSize.x) + 0.03; + phi = phi - mod(phi, pixelSize.y); + + normCoord.x = r * cos(phi); + normCoord.y = r * sin(phi); + + normCoord += normCenter; + + vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + + } +); +#endif + + +@implementation GPUImagePolarPixellateFilter + +@synthesize center = _center; + +@synthesize pixelSize = _pixelSize; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolarPixellateFragmentShaderString])) + { + return nil; + } + + pixelSizeUniform = [filterProgram uniformIndex:@"pixelSize"]; + centerUniform = [filterProgram uniformIndex:@"center"]; + + + self.pixelSize = CGSizeMake(0.05, 0.05); + self.center = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; +} + +- (void)setPixelSize:(CGSize)pixelSize +{ + _pixelSize = pixelSize; + + [self setSize:_pixelSize forUniform:pixelSizeUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePolkaDotFilter.h b/GPUImage/Source/GPUImagePolkaDotFilter.h new file mode 100644 index 0000000..369b773 --- /dev/null +++ b/GPUImage/Source/GPUImagePolkaDotFilter.h @@ -0,0 +1,10 @@ +#import "GPUImagePixellateFilter.h" + +@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter +{ + GLint dotScalingUniform; +} + +@property(readwrite, nonatomic) CGFloat dotScaling; + +@end diff --git a/GPUImage/Source/GPUImagePolkaDotFilter.m b/GPUImage/Source/GPUImagePolkaDotFilter.m new file mode 100644 index 0000000..a439a04 --- /dev/null +++ b/GPUImage/Source/GPUImagePolkaDotFilter.m @@ -0,0 +1,85 @@ +#import "GPUImagePolkaDotFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform 
sampler2D inputImageTexture; + + uniform highp float fractionalWidthOfPixel; + uniform highp float aspectRatio; + uniform highp float dotScaling; + + void main() + { + highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse); + lowp float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling); + + lowp vec4 inputColor = texture2D(inputImageTexture, samplePos); + + gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a); + } +); +#else +NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float fractionalWidthOfPixel; + uniform float aspectRatio; + uniform float dotScaling; + + void main() + { + vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio); + + vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor; + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse); + float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling); + + vec4 inputColor = texture2D(inputImageTexture, samplePos); + + gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a); + } +); +#endif + +@implementation GPUImagePolkaDotFilter + +@synthesize dotScaling = _dotScaling; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolkaDotFragmentShaderString])) + { + return nil; + } + + dotScalingUniform = [filterProgram uniformIndex:@"dotScaling"]; + + self.dotScaling = 0.90; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setDotScaling:(CGFloat)newValue; +{ + _dotScaling = newValue; + + [self setFloat:_dotScaling forUniform:dotScalingUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImagePosterizeFilter.h b/GPUImage/Source/GPUImagePosterizeFilter.h new file mode 100755 index 0000000..6f655b3 --- /dev/null +++ b/GPUImage/Source/GPUImagePosterizeFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilter.h" + +/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image. + */ +@interface GPUImagePosterizeFilter : GPUImageFilter +{ + GLint colorLevelsUniform; +} + +/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10. 
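+ For example (illustrative arithmetic, not from the original header): with colorLevels = 4.0, an input channel of 0.6 becomes floor(0.6 * 4.0 + 0.5) / 4.0 = floor(2.9) / 4.0 = 0.5, matching the shader's floor((textureColor * colorLevels) + 0.5) / colorLevels rounding.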
+ */ +@property(readwrite, nonatomic) NSUInteger colorLevels; + +@end diff --git a/GPUImage/Source/GPUImagePosterizeFilter.m b/GPUImage/Source/GPUImagePosterizeFilter.m new file mode 100755 index 0000000..a438cea --- /dev/null +++ b/GPUImage/Source/GPUImagePosterizeFilter.m @@ -0,0 +1,66 @@ +#import "GPUImagePosterizeFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform highp float colorLevels; + + void main() + { + highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels; + } +); +#else +NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float colorLevels; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels; + } +); +#endif + +@implementation GPUImagePosterizeFilter + +@synthesize colorLevels = _colorLevels; + +#pragma mark - +#pragma mark Initialization + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString])) + { + return nil; + } + + colorLevelsUniform = [filterProgram uniformIndex:@"colorLevels"]; + self.colorLevels = 10; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setColorLevels:(NSUInteger)newValue; +{ + _colorLevels = newValue; + + [self setFloat:_colorLevels forUniform:colorLevelsUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.h b/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.h new file mode 100755 index 0000000..141f8c5 --- /dev/null +++ b/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageSobelEdgeDetectionFilter.h" + +@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter + +@end diff --git a/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.m b/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.m new file mode 100755 index 0000000..a990693 --- /dev/null +++ b/GPUImage/Source/GPUImagePrewittEdgeDetectionFilter.m @@ -0,0 +1,97 @@ +#import "GPUImagePrewittEdgeDetectionFilter.h" + +@implementation GPUImagePrewittEdgeDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float edgeStrength; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = 
texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)) * edgeStrength; + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#else +NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float edgeStrength; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)) * edgeStrength; + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImagePrewittFragmentShaderString])) + { + return nil; + } + + self.edgeStrength = 1.0; + + return self; +} + + +@end diff --git a/GPUImage/Source/GPUImageRGBClosingFilter.h b/GPUImage/Source/GPUImageRGBClosingFilter.h new file mode 100644 index 0000000..08d13f8 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBClosingFilter.h @@ -0,0 +1,18 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageRGBErosionFilter; +@class GPUImageRGBDilationFilter; + +// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius. +// This helps to filter out smaller dark elements. 
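+// Closing is the morphological dual of opening: the dilation pass fills in dark gaps narrower than the radius,
+// and the erosion pass that follows restores the surviving features to roughly their original size.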
+ +@interface GPUImageRGBClosingFilter : GPUImageFilterGroup +{ + GPUImageRGBErosionFilter *erosionFilter; + GPUImageRGBDilationFilter *dilationFilter; +} + +- (id)initWithRadius:(NSUInteger)radius; + + +@end diff --git a/GPUImage/Source/GPUImageRGBClosingFilter.m b/GPUImage/Source/GPUImageRGBClosingFilter.m new file mode 100644 index 0000000..c5bb1c8 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBClosingFilter.m @@ -0,0 +1,41 @@ +#import "GPUImageRGBClosingFilter.h" +#import "GPUImageRGBErosionFilter.h" +#import "GPUImageRGBDilationFilter.h" + +@implementation GPUImageRGBClosingFilter + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +- (id)initWithRadius:(NSUInteger)radius; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: dilation + dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius]; + [self addFilter:dilationFilter]; + + // Second pass: erosion + erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius]; + [self addFilter:erosionFilter]; + + [dilationFilter addTarget:erosionFilter]; + + self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil]; + self.terminalFilter = erosionFilter; + + return self; +} + + +@end diff --git a/GPUImage/Source/GPUImageRGBDilationFilter.h b/GPUImage/Source/GPUImageRGBDilationFilter.h new file mode 100644 index 0000000..68276f8 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBDilationFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center. +// This extends out brighter colors, and can be used for abstraction of color images. + +@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter + +// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4. 
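+// Values outside that range are not an error: 0 is treated as 1, and anything above 4 falls back to the
+// radius-four shader pair (see the switch statement in -initWithRadius: in the implementation file).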
+- (id)initWithRadius:(NSUInteger)dilationRadius; + +@end diff --git a/GPUImage/Source/GPUImageRGBDilationFilter.m b/GPUImage/Source/GPUImageRGBDilationFilter.m new file mode 100644 index 0000000..9702c78 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBDilationFilter.m @@ -0,0 +1,306 @@ +#import "GPUImageRGBDilationFilter.h" +#import "GPUImageDilationFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + + lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + + gl_FragColor = max(maxValue, oneStepNegativeIntensity); + } +); + +NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + + lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + + gl_FragColor = max(maxValue, twoStepsNegativeIntensity); + } +); + +NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + lowp vec4 threeStepsNegativeIntensity = 
texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + + lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + + gl_FragColor = max(maxValue, threeStepsNegativeIntensity); + } +); + +NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate); + lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate); + + lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + maxValue = max(maxValue, fourStepsPositiveIntensity); + + gl_FragColor = max(maxValue, fourStepsNegativeIntensity); + } +); +#else +NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + + vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + + gl_FragColor = max(maxValue, oneStepNegativeIntensity); + } + ); + +NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { 
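+ // Separable gray-level dilation: take the component-wise maximum of the center
+ // texel and its neighbors up to two steps along one axis; the second pass of this
+ // two-pass filter repeats the same kernel along the other axis to cover the full
+ // rectangular neighborhood.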
+ vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + + vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + + gl_FragColor = max(maxValue, twoStepsNegativeIntensity); + } + ); + +NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + + vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + + gl_FragColor = max(maxValue, threeStepsNegativeIntensity); + } +); + +NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, 
threeStepsNegativeTextureCoordinate); + vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate); + vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate); + + vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity); + maxValue = max(maxValue, oneStepNegativeIntensity); + maxValue = max(maxValue, twoStepsPositiveIntensity); + maxValue = max(maxValue, twoStepsNegativeIntensity); + maxValue = max(maxValue, threeStepsPositiveIntensity); + maxValue = max(maxValue, threeStepsNegativeIntensity); + maxValue = max(maxValue, fourStepsPositiveIntensity); + + gl_FragColor = max(maxValue, fourStepsNegativeIntensity); + } +); +#endif + +@implementation GPUImageRGBDilationFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithRadius:(NSUInteger)dilationRadius; +{ + NSString *fragmentShaderForThisRadius = nil; + NSString *vertexShaderForThisRadius = nil; + + switch (dilationRadius) + { + case 0: + case 1: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusOneFragmentShaderString; + }; break; + case 2: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusTwoFragmentShaderString; + }; break; + case 3: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusThreeFragmentShaderString; + }; break; + case 4: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString; + }; break; + default: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString; + }; break; + } + + if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius])) + { + return nil; + } + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageRGBErosionFilter.h b/GPUImage/Source/GPUImageRGBErosionFilter.h new file mode 100644 index 0000000..5979cb7 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBErosionFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center. +// This extends out dark features, and can be used for abstraction of color images. + +@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter + +// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
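+// As with the dilation filter, out-of-range values are tolerated: 0 behaves as 1, and radii above 4 fall
+// back to the radius-four shaders (see the switch statement in -initWithRadius: below).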
+- (id)initWithRadius:(NSUInteger)erosionRadius; + +@end diff --git a/GPUImage/Source/GPUImageRGBErosionFilter.m b/GPUImage/Source/GPUImageRGBErosionFilter.m new file mode 100644 index 0000000..91e5f33 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBErosionFilter.m @@ -0,0 +1,304 @@ +#import "GPUImageRGBErosionFilter.h" +#import "GPUImageDilationFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + + lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + + gl_FragColor = min(minValue, oneStepNegativeIntensity); + } +); + +NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + + lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + + gl_FragColor = min(minValue, twoStepsNegativeIntensity); + } + ); + +NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); 
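+ // Erosion is the dual of dilation: fold the samples together with min() so that
+ // dark values spread outward along this sampling axis.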
+ + lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + + gl_FragColor = min(minValue, threeStepsNegativeIntensity); + } + ); + +NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate); + lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate); + + lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + minValue = min(minValue, fourStepsPositiveIntensity); + + gl_FragColor = min(minValue, fourStepsNegativeIntensity); + } +); +#else +NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + + vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + + gl_FragColor = min(minValue, oneStepNegativeIntensity); + } +); + +NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, 
centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + + vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + + gl_FragColor = min(minValue, twoStepsNegativeIntensity); + } +); + +NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + + vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + + gl_FragColor = min(minValue, threeStepsNegativeIntensity); + } +); + +NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING +( + varying vec2 centerTextureCoordinate; + varying vec2 oneStepPositiveTextureCoordinate; + varying vec2 oneStepNegativeTextureCoordinate; + varying vec2 twoStepsPositiveTextureCoordinate; + varying vec2 twoStepsNegativeTextureCoordinate; + varying vec2 threeStepsPositiveTextureCoordinate; + varying vec2 threeStepsNegativeTextureCoordinate; + varying vec2 fourStepsPositiveTextureCoordinate; + varying vec2 fourStepsNegativeTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate); + vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate); + vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate); + vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate); + vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate); + vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate); + vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate); + vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, 
fourStepsPositiveTextureCoordinate); + vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate); + + vec4 minValue = min(centerIntensity, oneStepPositiveIntensity); + minValue = min(minValue, oneStepNegativeIntensity); + minValue = min(minValue, twoStepsPositiveIntensity); + minValue = min(minValue, twoStepsNegativeIntensity); + minValue = min(minValue, threeStepsPositiveIntensity); + minValue = min(minValue, threeStepsNegativeIntensity); + minValue = min(minValue, fourStepsPositiveIntensity); + + gl_FragColor = min(minValue, fourStepsNegativeIntensity); + } +); +#endif + +@implementation GPUImageRGBErosionFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithRadius:(NSUInteger)erosionRadius; +{ + NSString *fragmentShaderForThisRadius = nil; + NSString *vertexShaderForThisRadius = nil; + + switch (erosionRadius) + { + case 0: + case 1: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusOneFragmentShaderString; + }; break; + case 2: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusTwoFragmentShaderString; + }; break; + case 3: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusThreeFragmentShaderString; + }; break; + case 4: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString; + }; break; + default: + { + vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString; + fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString; + }; break; + } + + if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius])) + { + return nil; + } + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageRGBFilter.h b/GPUImage/Source/GPUImageRGBFilter.h new file mode 100755 index 0000000..18966b1 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBFilter.h @@ -0,0 +1,15 @@ +#import "GPUImageFilter.h" + +@interface GPUImageRGBFilter : GPUImageFilter +{ + GLint redUniform; + GLint greenUniform; + GLint blueUniform; +} + +// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default. 
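+// For example, setting blue to 0.5 halves the blue channel, while values above 1.0 amplify a channel
+// (the result is clamped to 1.0 on output).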
+@property (readwrite, nonatomic) CGFloat red; +@property (readwrite, nonatomic) CGFloat green; +@property (readwrite, nonatomic) CGFloat blue; + +@end diff --git a/GPUImage/Source/GPUImageRGBFilter.m b/GPUImage/Source/GPUImageRGBFilter.m new file mode 100755 index 0000000..7a2e568 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBFilter.m @@ -0,0 +1,89 @@ +#import "GPUImageRGBFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform highp float redAdjustment; + uniform highp float greenAdjustment; + uniform highp float blueAdjustment; + + void main() + { + highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a); + } +); +#else +NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float redAdjustment; + uniform float greenAdjustment; + uniform float blueAdjustment; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a); + } + ); +#endif + +@implementation GPUImageRGBFilter + +@synthesize red = _red, blue = _blue, green = _green; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageRGBFragmentShaderString])) + { + return nil; + } + + redUniform = [filterProgram uniformIndex:@"redAdjustment"]; + self.red = 1.0; + + greenUniform = [filterProgram uniformIndex:@"greenAdjustment"]; + self.green = 1.0; + + blueUniform = [filterProgram uniformIndex:@"blueAdjustment"]; + self.blue = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setRed:(CGFloat)newValue; +{ + _red = newValue; + + [self setFloat:_red forUniform:redUniform program:filterProgram]; +} + +- (void)setGreen:(CGFloat)newValue; +{ + _green = newValue; + + [self setFloat:_green forUniform:greenUniform program:filterProgram]; +} + +- (void)setBlue:(CGFloat)newValue; +{ + _blue = newValue; + + [self setFloat:_blue forUniform:blueUniform program:filterProgram]; +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageRGBOpeningFilter.h b/GPUImage/Source/GPUImageRGBOpeningFilter.h new file mode 100644 index 0000000..dbec75f --- /dev/null +++ b/GPUImage/Source/GPUImageRGBOpeningFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageRGBErosionFilter; +@class GPUImageRGBDilationFilter; + +// A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius. +// This helps to filter out smaller bright elements. 
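+// The complementary operation is GPUImageRGBClosingFilter (dilation followed by erosion), which removes
+// small dark elements instead.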
+ +@interface GPUImageRGBOpeningFilter : GPUImageFilterGroup +{ + GPUImageRGBErosionFilter *erosionFilter; + GPUImageRGBDilationFilter *dilationFilter; +} + +- (id)initWithRadius:(NSUInteger)radius; + +@end diff --git a/GPUImage/Source/GPUImageRGBOpeningFilter.m b/GPUImage/Source/GPUImageRGBOpeningFilter.m new file mode 100644 index 0000000..9d53021 --- /dev/null +++ b/GPUImage/Source/GPUImageRGBOpeningFilter.m @@ -0,0 +1,41 @@ +#import "GPUImageRGBOpeningFilter.h" +#import "GPUImageRGBErosionFilter.h" +#import "GPUImageRGBDilationFilter.h" + +@implementation GPUImageRGBOpeningFilter + +- (id)init; +{ + if (!(self = [self initWithRadius:1])) + { + return nil; + } + + return self; +} + +- (id)initWithRadius:(NSUInteger)radius; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: erosion + erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius]; + [self addFilter:erosionFilter]; + + // Second pass: dilation + dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius]; + [self addFilter:dilationFilter]; + + [erosionFilter addTarget:dilationFilter]; + + self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil]; + self.terminalFilter = dilationFilter; + + return self; +} + + +@end diff --git a/GPUImage/Source/GPUImageRawDataInput.h b/GPUImage/Source/GPUImageRawDataInput.h new file mode 100644 index 0000000..bc24dad --- /dev/null +++ b/GPUImage/Source/GPUImageRawDataInput.h @@ -0,0 +1,41 @@ +#import "GPUImageOutput.h" + +// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter. +// The bytes are uploaded and stored within a texture, so nothing is kept locally. +// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat: +// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType: + +typedef enum { + GPUPixelFormatBGRA = GL_BGRA, + GPUPixelFormatRGBA = GL_RGBA, + GPUPixelFormatRGB = GL_RGB +} GPUPixelFormat; + +typedef enum { + GPUPixelTypeUByte = GL_UNSIGNED_BYTE, + GPUPixelTypeFloat = GL_FLOAT +} GPUPixelType; + +@interface GPUImageRawDataInput : GPUImageOutput +{ + CGSize uploadedImageSize; + + dispatch_semaphore_t dataUpdateSemaphore; +} + +// Initialization and teardown +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize; +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat; +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType; + +/** Input data pixel format + */ +@property (readwrite, nonatomic) GPUPixelFormat pixelFormat; +@property (readwrite, nonatomic) GPUPixelType pixelType; + +// Image rendering +- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize; +- (void)processData; +- (CGSize)outputImageSize; + +@end diff --git a/GPUImage/Source/GPUImageRawDataInput.m b/GPUImage/Source/GPUImageRawDataInput.m new file mode 100644 index 0000000..3c03516 --- /dev/null +++ b/GPUImage/Source/GPUImageRawDataInput.m @@ -0,0 +1,113 @@ +#import "GPUImageRawDataInput.h" + +@interface GPUImageRawDataInput() +- (void)uploadBytes:(GLubyte *)bytesToUpload; +@end + +@implementation GPUImageRawDataInput + +@synthesize pixelFormat = _pixelFormat; +@synthesize pixelType = _pixelType; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize; +{ + if (!(self = 
[self initWithBytes:bytesToUpload size:imageSize pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte])) + { + return nil; + } + + return self; +} + +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat; +{ + if (!(self = [self initWithBytes:bytesToUpload size:imageSize pixelFormat:pixelFormat type:GPUPixelTypeUByte])) + { + return nil; + } + + return self; +} + +- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType; +{ + if (!(self = [super init])) + { + return nil; + } + + dataUpdateSemaphore = dispatch_semaphore_create(1); + + uploadedImageSize = imageSize; + self.pixelFormat = pixelFormat; + self.pixelType = pixelType; + + [self uploadBytes:bytesToUpload]; + + return self; +} + +// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required. +- (void)dealloc; +{ +#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) ) + if (dataUpdateSemaphore != NULL) + { + dispatch_release(dataUpdateSemaphore); + } +#endif +} + +#pragma mark - +#pragma mark Image rendering + +- (void)uploadBytes:(GLubyte *)bytesToUpload; +{ + [GPUImageContext useImageProcessingContext]; + + [self initializeOutputTextureIfNeeded]; + + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexImage2D(GL_TEXTURE_2D, 0, _pixelFormat==GPUPixelFormatRGB ? GL_RGB : GL_RGBA, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)_pixelFormat, (GLenum)_pixelType, bytesToUpload); +} + +- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize; +{ + uploadedImageSize = imageSize; + + [self uploadBytes:bytesToUpload]; +} + +- (void)processData; +{ + if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0) + { + return; + } + + runAsynchronouslyOnVideoProcessingQueue(^{ + + CGSize pixelSizeOfImage = [self outputImageSize]; + + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:kCMTimeInvalid atIndex:textureIndexOfTarget]; + } + + dispatch_semaphore_signal(dataUpdateSemaphore); + }); +} + +- (CGSize)outputImageSize; +{ + return uploadedImageSize; +} + +@end diff --git a/GPUImage/Source/GPUImageRawDataOutput.h b/GPUImage/Source/GPUImageRawDataOutput.h new file mode 100755 index 0000000..762e8eb --- /dev/null +++ b/GPUImage/Source/GPUImageRawDataOutput.h @@ -0,0 +1,51 @@ +#import +#import "GPUImageContext.h" + +struct GPUByteColorVector { + GLubyte red; + GLubyte green; + GLubyte blue; + GLubyte alpha; +}; +typedef struct GPUByteColorVector GPUByteColorVector; + +@protocol GPUImageRawDataProcessor; + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +@interface GPUImageRawDataOutput : NSObject { + CGSize imageSize; + CVOpenGLESTextureCacheRef rawDataTextureCache; + CVPixelBufferRef renderTarget; + GPUImageRotationMode inputRotation; + BOOL outputBGRA; + CVOpenGLESTextureRef renderTexture; + + __unsafe_unretained id textureDelegate; +} +#else +@interface GPUImageRawDataOutput : NSObject { + CGSize imageSize; + CVOpenGLTextureCacheRef rawDataTextureCache; + CVPixelBufferRef renderTarget; + GPUImageRotationMode inputRotation; + BOOL outputBGRA; + CVOpenGLTextureRef renderTexture; + + 
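+ // held without retaining it, so no retain cycle forms between this output and its texture delegate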
__unsafe_unretained id textureDelegate; +} +#endif + +@property(readonly) GLubyte *rawBytesForImage; +@property(nonatomic, copy) void(^newFrameAvailableBlock)(void); +@property(nonatomic) BOOL enabled; + +// Initialization and teardown +- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat; + +// Data access +- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage; +- (NSUInteger)bytesPerRowInOutput; + +- (void)setImageSize:(CGSize)newImageSize; + +@end diff --git a/GPUImage/Source/GPUImageRawDataOutput.m b/GPUImage/Source/GPUImageRawDataOutput.m new file mode 100755 index 0000000..5377a51 --- /dev/null +++ b/GPUImage/Source/GPUImageRawDataOutput.m @@ -0,0 +1,482 @@ +#import "GPUImageRawDataOutput.h" + +#import "GPUImageContext.h" +#import "GLProgram.h" +#import "GPUImageFilter.h" +#import "GPUImageMovieWriter.h" + +@interface GPUImageRawDataOutput () +{ + + BOOL hasReadFromTheCurrentFrame; + + GLuint dataFramebuffer, dataRenderbuffer; + + GLuint inputTextureForDisplay; + + GLProgram *dataProgram; + GLint dataPositionAttribute, dataTextureCoordinateAttribute; + GLint dataInputTextureUniform; + + GLubyte *_rawBytesForImage; +} + +// Frame rendering +- (void)createDataFBO; +- (void)destroyDataFBO; +- (void)setFilterFBO; + +- (void)renderAtInternalSize; + +@end + +@implementation GPUImageRawDataOutput + +@synthesize rawBytesForImage = _rawBytesForImage; +@synthesize newFrameAvailableBlock = _newFrameAvailableBlock; +@synthesize enabled; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat; +{ + if (!(self = [super init])) + { + return nil; + } + + self.enabled = YES; + outputBGRA = resultsInBGRAFormat; + imageSize = newImageSize; + hasReadFromTheCurrentFrame = NO; + _rawBytesForImage = NULL; + inputRotation = kGPUImageNoRotation; + + [GPUImageContext useImageProcessingContext]; + if ( (outputBGRA && ![GPUImageContext supportsFastTextureUpload]) || (!outputBGRA && [GPUImageContext supportsFastTextureUpload]) ) + { + dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString]; + } + else + { + dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + } + + if (!dataProgram.initialized) + { + [dataProgram addAttribute:@"position"]; + [dataProgram addAttribute:@"inputTextureCoordinate"]; + + if (![dataProgram link]) + { + NSString *progLog = [dataProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [dataProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [dataProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + dataProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + dataPositionAttribute = [dataProgram attributeIndex:@"position"]; + dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"]; + dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"]; + + return self; +} + +- (void)dealloc +{ + [self destroyDataFBO]; + + if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload])) + { + free(_rawBytesForImage); + _rawBytesForImage = NULL; + } +} + +#pragma mark - +#pragma mark Frame rendering + +- 
(void)createDataFBO; +{ + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &dataFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload]) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &rawDataTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &rawDataTextureCache); +#endif +#else + CGLContextObj context = (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context]; + CVReturn err = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, + NULL, + context, + CGLGetPixelFormat(context), + NULL, + &rawDataTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGL(ES)TextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + CFDictionaryRef empty; // empty value for attr value. + CFMutableDictionaryRef attrs; + empty = CFDictionaryCreate(kCFAllocatorDefault, // our empty IOSurface properties dictionary + NULL, + NULL, + 0, + &kCFTypeDictionaryKeyCallBacks, + &kCFTypeDictionaryValueCallBacks); + attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, + 1, + &kCFTypeDictionaryKeyCallBacks, + &kCFTypeDictionaryValueCallBacks); + + CFDictionarySetValue(attrs, + kCVPixelBufferIOSurfacePropertiesKey, + empty); + + //CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget); + + CVPixelBufferCreate(kCFAllocatorDefault, + (int)imageSize.width, + (int)imageSize.height, + kCVPixelFormatType_32BGRA, + attrs, + &renderTarget); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + rawDataTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + GL_RGBA, // opengl format + (int)imageSize.width, + (int)imageSize.height, + GL_BGRA, // native iOS format + GL_UNSIGNED_BYTE, + 0, + &renderTexture); +#else + CVOpenGLTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + rawDataTextureCache, renderTarget, + NULL, + &renderTexture); +#endif + + CFRelease(attrs); + CFRelease(empty); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); +#else + glBindTexture(CVOpenGLTextureGetTarget(renderTexture), CVOpenGLTextureGetName(renderTexture)); +#endif + + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); +#else + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLTextureGetName(renderTexture), 0); +#endif + } + else + { + + glGenRenderbuffers(1, &dataRenderbuffer); + glBindRenderbuffer(GL_RENDERBUFFER, dataRenderbuffer); + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)imageSize.width, (int)imageSize.height); +#else + glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8, (int)imageSize.width, (int)imageSize.height); +#endif + + glFramebufferRenderbuffer(GL_FRAMEBUFFER, 
GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, dataRenderbuffer); + } + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); +} + +- (void)destroyDataFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (renderTexture) + { + CFRelease(renderTexture); + renderTexture = NULL; + } + + if (dataFramebuffer) + { + glDeleteFramebuffers(1, &dataFramebuffer); + dataFramebuffer = 0; + } + + if (dataRenderbuffer) + { + glDeleteRenderbuffers(1, &dataRenderbuffer); + dataRenderbuffer = 0; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + if (rawDataTextureCache) + { + CVOpenGLESTextureCacheFlush(rawDataTextureCache, 0); + CFRelease(rawDataTextureCache); + rawDataTextureCache = 0; + } +#endif + + if (renderTarget) + { + CVPixelBufferRelease(renderTarget); + renderTarget = 0; + } + }); +} + +- (void)setFilterFBO; +{ + if (!dataFramebuffer) + { + [self createDataFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer); + + glViewport(0, 0, (int)imageSize.width, (int)imageSize.height); +} + +#pragma mark - +#pragma mark Data access + +- (void)renderAtInternalSize; +{ + [GPUImageContext setActiveShaderProgram:dataProgram]; + [self setFilterFBO]; + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat textureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay); + glUniform1i(dataInputTextureUniform, 4); + + glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glEnableVertexAttribArray(dataPositionAttribute); + glEnableVertexAttribArray(dataTextureCoordinateAttribute); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage; +{ + GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage; +// NSLog(@"Row start"); +// for (unsigned int currentXPosition = 0; currentXPosition < (imageSize.width * 2.0); currentXPosition++) +// { +// GPUByteColorVector byteAtPosition = imageColorBytes[currentXPosition]; +// NSLog(@"%d - %d, %d, %d", currentXPosition, byteAtPosition.red, byteAtPosition.green, byteAtPosition.blue); +// } +// NSLog(@"Row end"); + +// GPUByteColorVector byteAtOne = imageColorBytes[1]; +// GPUByteColorVector byteAtWidth = imageColorBytes[(int)imageSize.width - 3]; +// GPUByteColorVector byteAtHeight = imageColorBytes[(int)(imageSize.height - 1) * (int)imageSize.width]; +// NSLog(@"Byte 1: %d, %d, %d, byte 2: %d, %d, %d, byte 3: %d, %d, %d", byteAtOne.red, byteAtOne.green, byteAtOne.blue, byteAtWidth.red, byteAtWidth.green, byteAtWidth.blue, byteAtHeight.red, byteAtHeight.green, byteAtHeight.blue); + + CGPoint locationToPickFrom = CGPointZero; + locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0)); + locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0)); + + if (outputBGRA) + { + GPUByteColorVector flippedColor = imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))]; + GLubyte temporaryRed = flippedColor.red; + + flippedColor.red = flippedColor.blue; 
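+ // finish swapping the red and blue components so the returned GPUByteColorVector
+ // reflects the BGRA byte ordering requested at initialization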
+ flippedColor.blue = temporaryRed; + + return flippedColor; + } + else + { + return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))]; + } +} + +#pragma mark - +#pragma mark GPUImageInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + hasReadFromTheCurrentFrame = NO; + + if (_newFrameAvailableBlock != NULL) + { + _newFrameAvailableBlock(); + } +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + inputTextureForDisplay = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ +} + +- (CGSize)maximumOutputSize; +{ + return imageSize; +} + +- (void)endProcessing; +{ +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +#pragma mark - +#pragma mark Accessors + +- (GLubyte *)rawBytesForImage; +{ + if ( (_rawBytesForImage == NULL) && (![GPUImageContext supportsFastTextureUpload]) ) + { + _rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte)); + hasReadFromTheCurrentFrame = NO; + } + + if (hasReadFromTheCurrentFrame) + { + return _rawBytesForImage; + } + else + { + runSynchronouslyOnVideoProcessingQueue(^{ + // Note: the fast texture caches speed up 640x480 frame reads from 9.6 ms to 3.1 ms on iPhone 4S + + [GPUImageContext useImageProcessingContext]; + if ([GPUImageContext supportsFastTextureUpload]) + { + CVPixelBufferUnlockBaseAddress(renderTarget, 0); + // CVOpenGLESTextureCacheFlush(rawDataTextureCache, 0); + } + + [self renderAtInternalSize]; + + if ([GPUImageContext supportsFastTextureUpload]) + { + glFinish(); + CVPixelBufferLockBaseAddress(renderTarget, 0); + _rawBytesForImage = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget); + } + else + { + glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage); + // GL_EXT_read_format_bgra + // glReadPixels(0, 0, imageSize.width, imageSize.height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, _rawBytesForImage); + } + + hasReadFromTheCurrentFrame = YES; + + }); + + return _rawBytesForImage; + } +} + +- (NSUInteger)bytesPerRowInOutput; +{ + if ([GPUImageContext supportsFastTextureUpload]) + { + return CVPixelBufferGetBytesPerRow(renderTarget); + } + else + { + return imageSize.width * 4; + } +} + +- (void)setImageSize:(CGSize)newImageSize { + imageSize = newImageSize; + [self destroyDataFBO]; + if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload])) + { + free(_rawBytesForImage); + _rawBytesForImage = NULL; + } +} + +@end diff --git a/GPUImage/Source/GPUImageSaturationBlendFilter.h b/GPUImage/Source/GPUImageSaturationBlendFilter.h new file mode 100644 index 0000000..767892a --- /dev/null +++ b/GPUImage/Source/GPUImageSaturationBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageSaturationBlendFilter.m 
b/GPUImage/Source/GPUImageSaturationBlendFilter.m new file mode 100644 index 0000000..da37f6a --- /dev/null +++ b/GPUImage/Source/GPUImageSaturationBlendFilter.m @@ -0,0 +1,213 @@ +#import "GPUImageSaturationBlendFilter.h" + +/** + * Saturation blend mode based upon pseudo code from the PDF specification. + */ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + highp float lum(lowp vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + lowp vec3 clipcolor(lowp vec3 c) { + highp float l = lum(c); + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + lowp vec3 setlum(lowp vec3 c, highp float l) { + highp float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + highp float sat(lowp vec3 c) { + lowp float n = min(min(c.r, c.g), c.b); + lowp float x = max(max(c.r, c.g), c.b); + return x - n; + } + + lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) { + return ((cmid - cmin) * s) / (cmax - cmin); + } + + lowp vec3 setsat(lowp vec3 c, highp float s) { + if (c.r > c.g) { + if (c.r > c.b) { + if (c.g > c.b) { + /* g is mid, b is min */ + c.g = mid(c.b, c.g, c.r, s); + c.b = 0.0; + } else { + /* b is mid, g is min */ + c.b = mid(c.g, c.b, c.r, s); + c.g = 0.0; + } + c.r = s; + } else { + /* b is max, r is mid, g is min */ + c.r = mid(c.g, c.r, c.b, s); + c.b = s; + c.r = 0.0; + } + } else if (c.r > c.b) { + /* g is max, r is mid, b is min */ + c.r = mid(c.b, c.r, c.g, s); + c.g = s; + c.b = 0.0; + } else if (c.g > c.b) { + /* g is max, b is mid, r is min */ + c.b = mid(c.r, c.b, c.g, s); + c.g = s; + c.r = 0.0; + } else if (c.b > c.g) { + /* b is max, g is mid, r is min */ + c.g = mid(c.r, c.g, c.b, s); + c.b = s; + c.r = 0.0; + } else { + c = vec3(0.0); + } + return c; + } + + void main() + { + highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#else +NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + float lum(vec3 c) { + return dot(c, vec3(0.3, 0.59, 0.11)); + } + + vec3 clipcolor(vec3 c) { + float l = lum(c); + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + + if (n < 0.0) { + c.r = l + ((c.r - l) * l) / (l - n); + c.g = l + ((c.g - l) * l) / (l - n); + c.b = l + ((c.b - l) * l) / (l - n); + } + if (x > 1.0) { + c.r = l + ((c.r - l) * (1.0 - l)) / (x - l); + c.g = l + ((c.g - l) * (1.0 - l)) / (x - l); + c.b = l + ((c.b - l) * (1.0 - l)) / (x - l); + } + + return c; + } + + vec3 setlum(vec3 c, float l) { + float d = l - lum(c); + c = c + vec3(d); + return clipcolor(c); + } + + float 
sat(vec3 c) { + float n = min(min(c.r, c.g), c.b); + float x = max(max(c.r, c.g), c.b); + return x - n; + } + + float mid(float cmin, float cmid, float cmax, float s) { + return ((cmid - cmin) * s) / (cmax - cmin); + } + + vec3 setsat(vec3 c, float s) { + if (c.r > c.g) { + if (c.r > c.b) { + if (c.g > c.b) { + /* g is mid, b is min */ + c.g = mid(c.b, c.g, c.r, s); + c.b = 0.0; + } else { + /* b is mid, g is min */ + c.b = mid(c.g, c.b, c.r, s); + c.g = 0.0; + } + c.r = s; + } else { + /* b is max, r is mid, g is min */ + c.r = mid(c.g, c.r, c.b, s); + c.b = s; + c.g = 0.0; + } + } else if (c.r > c.b) { + /* g is max, r is mid, b is min */ + c.r = mid(c.b, c.r, c.g, s); + c.g = s; + c.b = 0.0; + } else if (c.g > c.b) { + /* g is max, b is mid, r is min */ + c.b = mid(c.r, c.b, c.g, s); + c.g = s; + c.r = 0.0; + } else if (c.b > c.g) { + /* b is max, g is mid, r is min */ + c.g = mid(c.r, c.g, c.b, s); + c.b = s; + c.r = 0.0; + } else { + c = vec3(0.0); + } + return c; + } + + void main() + { + vec4 baseColor = texture2D(inputImageTexture, textureCoordinate); + vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a); + } +); +#endif + + +@implementation GPUImageSaturationBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageSaturationFilter.h b/GPUImage/Source/GPUImageSaturationFilter.h new file mode 100755 index 0000000..1c6ff5b --- /dev/null +++ b/GPUImage/Source/GPUImageSaturationFilter.h @@ -0,0 +1,14 @@ +#import "GPUImageFilter.h" + +/** Adjusts the saturation of an image + */ +@interface GPUImageSaturationFilter : GPUImageFilter +{ + GLint saturationUniform; +} + +/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level + */ +@property(readwrite, nonatomic) CGFloat saturation; + +@end diff --git a/GPUImage/Source/GPUImageSaturationFilter.m b/GPUImage/Source/GPUImageSaturationFilter.m new file mode 100755 index 0000000..fc373d4 --- /dev/null +++ b/GPUImage/Source/GPUImageSaturationFilter.m @@ -0,0 +1,78 @@ +#import "GPUImageSaturationFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float saturation; + + // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham + const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp float luminance = dot(textureColor.rgb, luminanceWeighting); + lowp vec3 greyScaleColor = vec3(luminance); + + gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w); + + } +); +#else
NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float saturation; + + // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham + const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + float luminance =
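+ // (Rec. 709-style luma weights; mix(grey, color, saturation) extrapolates past
+ // the source color when saturation > 1.0, which is how values up to 2.0
+ // over-saturate instead of clamping.)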
dot(textureColor.rgb, luminanceWeighting); + vec3 greyScaleColor = vec3(luminance); + + gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w); + + } + ); +#endif + +@implementation GPUImageSaturationFilter + +@synthesize saturation = _saturation; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationFragmentShaderString])) + { + return nil; + } + + saturationUniform = [filterProgram uniformIndex:@"saturation"]; + self.saturation = 1.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setSaturation:(CGFloat)newValue; +{ + _saturation = newValue; + + [self setFloat:_saturation forUniform:saturationUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageScreenBlendFilter.h b/GPUImage/Source/GPUImageScreenBlendFilter.h new file mode 100755 index 0000000..2df3abf --- /dev/null +++ b/GPUImage/Source/GPUImageScreenBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageScreenBlendFilter.m b/GPUImage/Source/GPUImageScreenBlendFilter.m new file mode 100755 index 0000000..d871e7d --- /dev/null +++ b/GPUImage/Source/GPUImageScreenBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageScreenBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + mediump vec4 whiteColor = vec4(1.0); + gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor)); + } +); +#else +NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + vec4 whiteColor = vec4(1.0); + gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor)); + } +); +#endif + +@implementation GPUImageScreenBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageSepiaFilter.h b/GPUImage/Source/GPUImageSepiaFilter.h new file mode 100755 index 0000000..a45164f --- /dev/null +++ b/GPUImage/Source/GPUImageSepiaFilter.h @@ -0,0 +1,6 @@ +#import "GPUImageColorMatrixFilter.h" + +/// Simple sepia tone filter +@interface GPUImageSepiaFilter : GPUImageColorMatrixFilter + +@end diff --git a/GPUImage/Source/GPUImageSepiaFilter.m b/GPUImage/Source/GPUImageSepiaFilter.m new file mode 100755 index 0000000..71668d6 --- /dev/null +++ b/GPUImage/Source/GPUImageSepiaFilter.m @@ -0,0 +1,24 @@ +#import "GPUImageSepiaFilter.h" + +@implementation GPUImageSepiaFilter + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + self.intensity = 1.0; + self.colorMatrix = (GPUMatrix4x4){ + {0.3588, 
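+ // (Each row weights the input RGBA for one output channel, e.g.
+ // out.r = 0.3588 * R + 0.7044 * G + 0.1368 * B, faded against the
+ // original color by the intensity property.)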
0.7044, 0.1368, 0.0}, + {0.2990, 0.5870, 0.1140, 0.0}, + {0.2392, 0.4696, 0.0912 ,0.0}, + {0,0,0,1.0}, + }; + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageSharpenFilter.h b/GPUImage/Source/GPUImageSharpenFilter.h new file mode 100755 index 0000000..739df50 --- /dev/null +++ b/GPUImage/Source/GPUImageSharpenFilter.h @@ -0,0 +1,12 @@ +#import "GPUImageFilter.h" + +@interface GPUImageSharpenFilter : GPUImageFilter +{ + GLint sharpnessUniform; + GLint imageWidthFactorUniform, imageHeightFactorUniform; +} + +// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level +@property(readwrite, nonatomic) CGFloat sharpness; + +@end diff --git a/GPUImage/Source/GPUImageSharpenFilter.m b/GPUImage/Source/GPUImageSharpenFilter.m new file mode 100755 index 0000000..6d7367a --- /dev/null +++ b/GPUImage/Source/GPUImageSharpenFilter.m @@ -0,0 +1,147 @@ +#import "GPUImageSharpenFilter.h" + +NSString *const kGPUImageSharpenVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform float imageWidthFactor; + uniform float imageHeightFactor; + uniform float sharpness; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + varying vec2 topTextureCoordinate; + varying vec2 bottomTextureCoordinate; + + varying float centerMultiplier; + varying float edgeMultiplier; + + void main() + { + gl_Position = position; + + vec2 widthStep = vec2(imageWidthFactor, 0.0); + vec2 heightStep = vec2(0.0, imageHeightFactor); + + textureCoordinate = inputTextureCoordinate.xy; + leftTextureCoordinate = inputTextureCoordinate.xy - widthStep; + rightTextureCoordinate = inputTextureCoordinate.xy + widthStep; + topTextureCoordinate = inputTextureCoordinate.xy + heightStep; + bottomTextureCoordinate = inputTextureCoordinate.xy - heightStep; + + centerMultiplier = 1.0 + 4.0 * sharpness; + edgeMultiplier = sharpness; + } +); + + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying highp vec2 textureCoordinate; + varying highp vec2 leftTextureCoordinate; + varying highp vec2 rightTextureCoordinate; + varying highp vec2 topTextureCoordinate; + varying highp vec2 bottomTextureCoordinate; + + varying highp float centerMultiplier; + varying highp float edgeMultiplier; + + uniform sampler2D inputImageTexture; + + void main() + { + mediump vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb; + mediump vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + mediump vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + mediump vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + mediump vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + + gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w); + } +); +#else +NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + varying vec2 topTextureCoordinate; + varying vec2 bottomTextureCoordinate; + + varying float centerMultiplier; + varying float edgeMultiplier; + + uniform sampler2D 
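+ // (centerMultiplier = 1.0 + 4.0 * sharpness and edgeMultiplier = sharpness, so the
+ // result below is c * (1.0 + 4.0 * s) - s * (l + r + t + b), i.e. the source plus a
+ // scaled Laplacian; sharpness 0.0 passes the image through unchanged.)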
inputImageTexture; + + void main() + { + vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb; + vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb; + vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb; + vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb; + vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb; + + gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w); + } +); +#endif + + +@implementation GPUImageSharpenFilter + +@synthesize sharpness = _sharpness; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageSharpenVertexShaderString fragmentShaderFromString:kGPUImageSharpenFragmentShaderString])) + { + return nil; + } + + sharpnessUniform = [filterProgram uniformIndex:@"sharpness"]; + self.sharpness = 0.0; + + imageWidthFactorUniform = [filterProgram uniformIndex:@"imageWidthFactor"]; + imageHeightFactorUniform = [filterProgram uniformIndex:@"imageHeightFactor"]; + + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.height); + glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.width); + } + else + { + glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.width); + glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.height); + } + }); +} + +#pragma mark - +#pragma mark Accessors + +- (void)setSharpness:(CGFloat)newValue; +{ + _sharpness = newValue; + + [self setFloat:_sharpness forUniform:sharpnessUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.h b/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.h new file mode 100644 index 0000000..b16ebc0 --- /dev/null +++ b/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageHarrisCornerDetectionFilter.h" + +/** Shi-Tomasi feature detector + + This is the Shi-Tomasi feature detector, as described in + J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994. 
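+
+ The cornerness computed in the shader is, up to a factor of two, the smaller
+ eigenvalue of the 2x2 structure tensor [[Ix^2, IxIy], [IxIy, Iy^2]], whose
+ eigenvalues are ((Ix^2 + Iy^2) +/- sqrt((Ix^2 - Iy^2)^2 + 4 * IxIy^2)) / 2;
+ Shi-Tomasi keeps features where that minimum eigenvalue is large.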
+ */ + +@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter + +// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5 + +@end diff --git a/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.m b/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.m new file mode 100644 index 0000000..e58cbb4 --- /dev/null +++ b/GPUImage/Source/GPUImageShiTomasiFeatureDetectionFilter.m @@ -0,0 +1,65 @@ +#import "GPUImageShiTomasiFeatureDetectionFilter.h" + +@implementation GPUImageShiTomasiFeatureDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float sensitivity; + + void main() + { + mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + mediump float derivativeDifference = derivativeElements.x - derivativeElements.y; + mediump float zElement = (derivativeElements.z * 2.0) - 1.0; + + // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy) + mediump float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement); + + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#else +NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float sensitivity; + + void main() + { + vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb; + + float derivativeDifference = derivativeElements.x - derivativeElements.y; + float zElement = (derivativeElements.z * 2.0) - 1.0; + + // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy) + float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement); + + gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageShiTomasiCornerDetectionFragmentShaderString])) + { + return nil; + } + + self.sensitivity = 1.5; + + return self; +} + + +@end diff --git a/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.h b/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.h new file mode 100644 index 0000000..934b1e3 --- /dev/null +++ b/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageGaussianBlurFilter.h" + +// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image) + +@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter + +@end diff --git a/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.m b/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.m new file mode 100644 index 0000000..4ff0d91 --- /dev/null +++ b/GPUImage/Source/GPUImageSingleComponentGaussianBlurFilter.m @@ -0,0 +1,189 @@ +#import "GPUImageSingleComponentGaussianBlurFilter.h" + +@implementation GPUImageSingleComponentGaussianBlurFilter + ++ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImageVertexShaderString; + } + + // First, generate the normal Gaussian weights for a given sigma + 
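+    // (w_i = exp(-i^2 / (2 * sigma^2)) / sqrt(2 * pi * sigma^2) for i = 0..blurRadius;
+    // sumOfWeights counts every off-center weight twice because the kernel is
+    // symmetric about the center tap.)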
GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat)); + GLfloat sumOfWeights = 0.0; + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0))); + + if (currentGaussianWeightIndex == 0) + { + sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex]; + } + else + { + sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; + } + } + + // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } + + // From these weights we calculate the offsets to read interpolated values from + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat)); + + for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++) + { + GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2]; + + GLfloat optimizedWeight = firstWeight + secondWeight; + + optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight; + } + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + // Header + [shaderString appendFormat:@"\ + attribute vec4 position;\n\ + attribute vec4 inputTextureCoordinate;\n\ + \n\ + uniform float texelWidthOffset;\n\ + uniform float texelHeightOffset;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + gl_Position = position;\n\ + \n\ + vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))]; + + // Inner offset loop + [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"]; + for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++) + { + [shaderString appendFormat:@"\ + blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\ + blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]]; + } + + // Footer + [shaderString appendString:@"}\n"]; + + free(optimizedGaussianOffsets); + free(standardGaussianWeights); + return shaderString; +} + ++ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma; +{ + if (blurRadius < 1) + { + return kGPUImagePassthroughFragmentShaderString; + } + + // First, generate the normal Gaussian weights for a given sigma + GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat)); + GLfloat sumOfWeights = 0.0; + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; 
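+ // (Same normalized Gaussian weights as in the vertex-shader method above; the
+ // fragment shader needs the weight values themselves, not just the offsets.)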
currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0))); + + if (currentGaussianWeightIndex == 0) + { + sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex]; + } + else + { + sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex]; + } + } + + // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance + for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++) + { + standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights; + } + + // From these weights we calculate the offsets to read interpolated values from + NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7); + NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2); + + NSMutableString *shaderString = [[NSMutableString alloc] init]; + + // Header +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + uniform highp float texelWidthOffset;\n\ + uniform highp float texelHeightOffset;\n\ + \n\ + varying highp vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + lowp float sum = 0.0;\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ]; +#else + [shaderString appendFormat:@"\ + uniform sampler2D inputImageTexture;\n\ + uniform float texelWidthOffset;\n\ + uniform float texelHeightOffset;\n\ + \n\ + varying vec2 blurCoordinates[%lu];\n\ + \n\ + void main()\n\ + {\n\ + float sum = 0.0;\n", 1 + (numberOfOptimizedOffsets * 2) ]; +#endif + + // Inner texture loop + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]).r * %f;\n", standardGaussianWeights[0]]; + + for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++) + { + GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2]; + GLfloat optimizedWeight = firstWeight + secondWeight; + + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight]; + } + + // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader + if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#else + [shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"]; +#endif + + for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++) + { + GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1]; + GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2]; + + GLfloat
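+            // (Linear-sampling trick: a single bilinear fetch at the weighted-average
+            // offset (w1*o1 + w2*o2) / (w1 + w2), weighted by w1 + w2, reproduces the
+            // two individual taps at offsets o1 and o2.)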
optimizedWeight = firstWeight + secondWeight; + GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight; + + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight]; + [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight]; + } + } + + // Footer + [shaderString appendString:@"\ + gl_FragColor = vec4(sum, sum, sum, 1.0);\n\ + }\n"]; + + free(standardGaussianWeights); + return shaderString; +} + + +@end diff --git a/GPUImage/Source/GPUImageSketchFilter.h b/GPUImage/Source/GPUImageSketchFilter.h new file mode 100755 index 0000000..598145a --- /dev/null +++ b/GPUImage/Source/GPUImageSketchFilter.h @@ -0,0 +1,11 @@ +#import "GPUImageSobelEdgeDetectionFilter.h" + +/** Converts video to look like a sketch. + + This is just the Sobel edge detection filter with the colors inverted. + */ +@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageSketchFilter.m b/GPUImage/Source/GPUImageSketchFilter.m new file mode 100755 index 0000000..3cda220 --- /dev/null +++ b/GPUImage/Source/GPUImageSketchFilter.m @@ -0,0 +1,98 @@ +#import "GPUImageSketchFilter.h" + +@implementation GPUImageSketchFilter + +// Invert the colorspace for a sketch +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING +( + precision mediump float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform float edgeStrength; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = 1.0 - (length(vec2(h, v)) * edgeStrength); + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#else +NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 
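+ // (Identical Sobel sampling to GPUImageSobelEdgeDetectionFilter; only the last
+ // line differs, inverting the magnitude as 1.0 - length(...) so edges render
+ // dark on a light background.)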
bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform float edgeStrength; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = 1.0 - (length(vec2(h, v)) * edgeStrength); + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageSketchFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageSmoothToonFilter.h b/GPUImage/Source/GPUImageSmoothToonFilter.h new file mode 100755 index 0000000..f89caac --- /dev/null +++ b/GPUImage/Source/GPUImageSmoothToonFilter.h @@ -0,0 +1,28 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGaussianBlurFilter; +@class GPUImageToonFilter; + +/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise. + */ +@interface GPUImageSmoothToonFilter : GPUImageFilterGroup +{ + GPUImageGaussianBlurFilter *blurFilter; + GPUImageToonFilter *toonFilter; +} + +/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels +@property(readwrite, nonatomic) CGFloat texelWidth; +/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels +@property(readwrite, nonatomic) CGFloat texelHeight; + +/// The radius of the underlying Gaussian blur. The default is 2.0. 
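+/// (The value is forwarded to the internal GPUImageGaussianBlurFilter.)
+///
+/// A minimal usage sketch, assuming an existing GPUImageVideoCamera named videoCamera
+/// and a GPUImageView named filterView (both names are illustrative, not part of this
+/// header):
+///
+///   GPUImageSmoothToonFilter *smoothToonFilter = [[GPUImageSmoothToonFilter alloc] init];
+///   smoothToonFilter.blurRadiusInPixels = 4.0; // smooth more noise before edge detection
+///   smoothToonFilter.quantizationLevels = 8.0; // coarser color banding
+///   [videoCamera addTarget:smoothToonFilter];
+///   [smoothToonFilter addTarget:filterView];
+///   [videoCamera startCameraCapture];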
+@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; + +/// The threshold at which to apply the edges, default of 0.2 +@property(readwrite, nonatomic) CGFloat threshold; + +/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0 +@property(readwrite, nonatomic) CGFloat quantizationLevels; + +@end diff --git a/GPUImage/Source/GPUImageSmoothToonFilter.m b/GPUImage/Source/GPUImageSmoothToonFilter.m new file mode 100755 index 0000000..03828f4 --- /dev/null +++ b/GPUImage/Source/GPUImageSmoothToonFilter.m @@ -0,0 +1,94 @@ +#import "GPUImageSmoothToonFilter.h" +#import "GPUImageGaussianBlurFilter.h" +#import "GPUImageToonFilter.h" + +@implementation GPUImageSmoothToonFilter + +@synthesize threshold; +@synthesize blurRadiusInPixels; +@synthesize quantizationLevels; +@synthesize texelWidth; +@synthesize texelHeight; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: apply a variable Gaussian blur + blurFilter = [[GPUImageGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + + // Second pass: run the Sobel edge detection on this blurred image, along with a posterization effect + toonFilter = [[GPUImageToonFilter alloc] init]; + [self addFilter:toonFilter]; + + // Texture location 0 needs to be the sharp image for both the blur and the second stage processing + [blurFilter addTarget:toonFilter]; + + self.initialFilters = [NSArray arrayWithObject:blurFilter]; + self.terminalFilter = toonFilter; + + self.blurRadiusInPixels = 2.0; + self.threshold = 0.2; + self.quantizationLevels = 10.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setTexelWidth:(CGFloat)newValue; +{ + toonFilter.texelWidth = newValue; +} + +- (CGFloat)texelWidth; +{ + return toonFilter.texelWidth; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + toonFilter.texelHeight = newValue; +} + +- (CGFloat)texelHeight; +{ + return toonFilter.texelHeight; +} + +- (void)setThreshold:(CGFloat)newValue; +{ + toonFilter.threshold = newValue; +} + +- (CGFloat)threshold; +{ + return toonFilter.threshold; +} + +- (void)setQuantizationLevels:(CGFloat)newValue; +{ + toonFilter.quantizationLevels = newValue; +} + +- (CGFloat)quantizationLevels; +{ + return toonFilter.quantizationLevels; +} + +@end diff --git a/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.h b/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.h new file mode 100755 index 0000000..d6b2c13 --- /dev/null +++ b/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.h @@ -0,0 +1,16 @@ +#import "GPUImageTwoPassFilter.h" + +@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter +{ + GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform; + BOOL hasOverriddenImageSizeFactor; +} + +// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels +@property(readwrite, nonatomic) CGFloat texelWidth; +@property(readwrite, nonatomic) CGFloat texelHeight; + +// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0. 
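+// (Applied in the fragment shader as mag = length(vec2(h, v)) * edgeStrength, so
+// values above 1.0 brighten edges until they clip at white.)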
+@property(readwrite, nonatomic) CGFloat edgeStrength; + +@end diff --git a/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.m b/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.m new file mode 100755 index 0000000..449129b --- /dev/null +++ b/GPUImage/Source/GPUImageSobelEdgeDetectionFilter.m @@ -0,0 +1,186 @@ +#import "GPUImageSobelEdgeDetectionFilter.h" +#import "GPUImageGrayscaleFilter.h" +#import "GPUImage3x3ConvolutionFilter.h" + +// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING +( + precision mediump float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float edgeStrength; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)) * edgeStrength; + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#else +NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float edgeStrength; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + 
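+       // (h and v are the two 3x3 Sobel responses: the kernel
+       // [-1 -2 -1; 0 0 0; 1 2 1] and its transpose, up to sign; the edge
+       // magnitude is then length(vec2(h, v)) scaled by edgeStrength.)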
bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)) * edgeStrength; + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#endif + +@implementation GPUImageSobelEdgeDetectionFilter + +@synthesize texelWidth = _texelWidth; +@synthesize texelHeight = _texelHeight; +@synthesize edgeStrength = _edgeStrength; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageSobelEdgeDetectionFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + // Do a luminance pass first to reduce the calculations performed at each fragment in the edge detection phase + + if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString secondStageVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString secondStageFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + hasOverriddenImageSizeFactor = NO; + + texelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"]; + edgeStrengthUniform = [secondFilterProgram uniformIndex:@"edgeStrength"]; + + self.edgeStrength = 1.0; + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + if (!hasOverriddenImageSizeFactor) + { + _texelWidth = 1.0 / filterFrameSize.width; + _texelHeight = 1.0 / filterFrameSize.height; + + runSynchronouslyOnVideoProcessingQueue(^{ + GLProgram *previousProgram = [GPUImageContext sharedImageProcessingContext].currentShaderProgram; + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + glUniform1f(texelWidthUniform, _texelWidth); + glUniform1f(texelHeightUniform, _texelHeight); + [GPUImageContext setActiveShaderProgram:previousProgram]; + }); + } +} + +- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex; +{ + [super setUniformsForProgramAtIndex:programIndex]; + + if (programIndex == 1) + { + glUniform1f(texelWidthUniform, _texelWidth); + glUniform1f(texelHeightUniform, _texelHeight); + } +} + +- (BOOL)wantsMonochromeInput; +{ + return YES; +} + +- (BOOL)providesMonochromeOutput; +{ + return YES; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTexelWidth:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelWidth = newValue; + + [self setFloat:_texelWidth forUniform:texelWidthUniform program:secondFilterProgram]; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelHeight = newValue; + + [self setFloat:_texelHeight forUniform:texelHeightUniform program:secondFilterProgram]; +} + +- (void)setEdgeStrength:(CGFloat)newValue; +{ + _edgeStrength = newValue; + + [self setFloat:_edgeStrength forUniform:edgeStrengthUniform program:secondFilterProgram]; +} + + +@end + diff --git a/GPUImage/Source/GPUImageSoftEleganceFilter.h b/GPUImage/Source/GPUImageSoftEleganceFilter.h new file mode 100755 index 0000000..596e156 --- /dev/null +++ b/GPUImage/Source/GPUImageSoftEleganceFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImagePicture; + +/** A photo filter based on Soft Elegance Photoshop action + http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603 + */ + +// Note: If you want to use this effect you 
have to add +// lookup_soft_elegance_1.png and lookup_soft_elegance_2.png +// from Resources folder to your application bundle. + +@interface GPUImageSoftEleganceFilter : GPUImageFilterGroup +{ + GPUImagePicture *lookupImageSource1; + GPUImagePicture *lookupImageSource2; +} + +@end diff --git a/GPUImage/Source/GPUImageSoftEleganceFilter.m b/GPUImage/Source/GPUImageSoftEleganceFilter.m new file mode 100755 index 0000000..fc643e0 --- /dev/null +++ b/GPUImage/Source/GPUImageSoftEleganceFilter.m @@ -0,0 +1,68 @@ +#import "GPUImageSoftEleganceFilter.h" +#import "GPUImagePicture.h" +#import "GPUImageLookupFilter.h" +#import "GPUImageGaussianBlurFilter.h" +#import "GPUImageAlphaBlendFilter.h" + +@implementation GPUImageSoftEleganceFilter + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + UIImage *image1 = [UIImage imageNamed:@"lookup_soft_elegance_1.png"]; + UIImage *image2 = [UIImage imageNamed:@"lookup_soft_elegance_2.png"]; +#else + NSImage *image1 = [NSImage imageNamed:@"lookup_soft_elegance_1.png"]; + NSImage *image2 = [NSImage imageNamed:@"lookup_soft_elegance_2.png"]; +#endif + + NSAssert(image1 && image2, + @"To use GPUImageSoftEleganceFilter you need to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from GPUImage/framework/Resources to your application bundle."); + + lookupImageSource1 = [[GPUImagePicture alloc] initWithImage:image1]; + GPUImageLookupFilter *lookupFilter1 = [[GPUImageLookupFilter alloc] init]; + [self addFilter:lookupFilter1]; + + [lookupImageSource1 addTarget:lookupFilter1 atTextureLocation:1]; + [lookupImageSource1 processImage]; + + GPUImageGaussianBlurFilter *gaussianBlur = [[GPUImageGaussianBlurFilter alloc] init]; + gaussianBlur.blurRadiusInPixels = 10.0; + [lookupFilter1 addTarget:gaussianBlur]; + [self addFilter:gaussianBlur]; + + GPUImageAlphaBlendFilter *alphaBlend = [[GPUImageAlphaBlendFilter alloc] init]; + alphaBlend.mix = 0.14; + [lookupFilter1 addTarget:alphaBlend]; + [gaussianBlur addTarget:alphaBlend]; + [self addFilter:alphaBlend]; + + lookupImageSource2 = [[GPUImagePicture alloc] initWithImage:image2]; + + GPUImageLookupFilter *lookupFilter2 = [[GPUImageLookupFilter alloc] init]; + [alphaBlend addTarget:lookupFilter2]; + [lookupImageSource2 addTarget:lookupFilter2]; + [lookupImageSource2 processImage]; + [self addFilter:lookupFilter2]; + + self.initialFilters = [NSArray arrayWithObjects:lookupFilter1, nil]; + self.terminalFilter = lookupFilter2; + + return self; +} + +-(void)prepareForImageCapture { + [lookupImageSource1 processImage]; + [lookupImageSource2 processImage]; + [super prepareForImageCapture]; +} + +#pragma mark - +#pragma mark Accessors + +@end diff --git a/GPUImage/Source/GPUImageSoftLightBlendFilter.h b/GPUImage/Source/GPUImageSoftLightBlendFilter.h new file mode 100755 index 0000000..13fc877 --- /dev/null +++ b/GPUImage/Source/GPUImageSoftLightBlendFilter.h @@ -0,0 +1,7 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter +{ +} + +@end diff --git a/GPUImage/Source/GPUImageSoftLightBlendFilter.m b/GPUImage/Source/GPUImageSoftLightBlendFilter.m new file mode 100755 index 0000000..368bce0 --- /dev/null +++ b/GPUImage/Source/GPUImageSoftLightBlendFilter.m @@ -0,0 +1,54 @@ +#import "GPUImageSoftLightBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 
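+ // (In the body below, step(base.a, 0.0) is 1.0 only when base.a is 0.0, so
+ // alphaDivisor = base.a + step(base.a, 0.0) can never be zero and the
+ // base / alphaDivisor un-premultiply is safe against divide-by-zero.)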
textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + mediump vec4 base = texture2D(inputImageTexture, textureCoordinate); + mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + lowp float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output + gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); + } +); +#else +NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 base = texture2D(inputImageTexture, textureCoordinate); + vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2); + + float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output + gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a); + } +); +#endif + +@implementation GPUImageSoftLightBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSoftLightBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageSolidColorGenerator.h b/GPUImage/Source/GPUImageSolidColorGenerator.h new file mode 100644 index 0000000..8d7a5ed --- /dev/null +++ b/GPUImage/Source/GPUImageSolidColorGenerator.h @@ -0,0 +1,19 @@ +#import "GPUImageFilter.h" + +// This outputs an image with a constant color. 
You need to use -forceProcessingAtSize: in order to set the output image +// dimensions, or this won't work correctly + + +@interface GPUImageSolidColorGenerator : GPUImageFilter +{ + GLint colorUniform; + GLint useExistingAlphaUniform; +} + +// This color dictates what the output image will be filled with +@property(readwrite, nonatomic) GPUVector4 color; +@property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO + +- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; + +@end diff --git a/GPUImage/Source/GPUImageSolidColorGenerator.m b/GPUImage/Source/GPUImageSolidColorGenerator.m new file mode 100644 index 0000000..ee9a18d --- /dev/null +++ b/GPUImage/Source/GPUImageSolidColorGenerator.m @@ -0,0 +1,102 @@ +#import "GPUImageSolidColorGenerator.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying highp vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + uniform vec4 color; + uniform float useExistingAlpha; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha)); + } + ); +#else +NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + uniform sampler2D inputImageTexture; + uniform vec4 color; + uniform float useExistingAlpha; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha)); + } + ); +#endif + +@implementation GPUImageSolidColorGenerator + +@synthesize color = _color; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUSolidColorFragmentShaderString])) + { + return nil; + } + + colorUniform = [filterProgram uniformIndex:@"color"]; + useExistingAlphaUniform = [filterProgram uniformIndex:@"useExistingAlpha"]; + + self.color = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f}; + self.useExistingAlpha = NO; + + return self; +} + + +#pragma mark - +#pragma mark Accessors + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + + if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero)) + { + [self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0]; + } +} + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [super addTarget:newTarget atTextureLocation:textureLocation]; + + if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero)) + { + [newTarget setInputSize:inputTextureSize atIndex:textureLocation]; + [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation]; + } +} + +- (void)setColor:(GPUVector4)newValue; +{ + [self setColorRed:newValue.one green:newValue.two blue:newValue.three alpha:newValue.four]; +} + +- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +{ + _color.one = redComponent; + _color.two = greenComponent; + _color.three = blueComponent; + _color.four = alphaComponent; + + [self setVec4:_color forUniform:colorUniform program:filterProgram]; +} + +- (void)setUseExistingAlpha:(BOOL)useExistingAlpha; +{ + _useExistingAlpha = useExistingAlpha; + + [self setInteger:(useExistingAlpha ? 
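+    // (The shader computes alpha as max(textureColor.a, 1.0 - useExistingAlpha):
+    // with the flag at 0 the max is always 1.0, an opaque fill; at 1 the incoming
+    // texture's alpha survives.)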
1 : 0) forUniform:useExistingAlphaUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageSourceOverBlendFilter.h b/GPUImage/Source/GPUImageSourceOverBlendFilter.h new file mode 100644 index 0000000..29e3063 --- /dev/null +++ b/GPUImage/Source/GPUImageSourceOverBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageSourceOverBlendFilter.m b/GPUImage/Source/GPUImageSourceOverBlendFilter.m new file mode 100644 index 0000000..432adc4 --- /dev/null +++ b/GPUImage/Source/GPUImageSourceOverBlendFilter.m @@ -0,0 +1,51 @@ +#import "GPUImageSourceOverBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate); + + gl_FragColor = mix(textureColor, textureColor2, textureColor2.a); + } +); +#else +NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate); + + gl_FragColor = mix(textureColor, textureColor2, textureColor2.a); + } + ); +#endif + +@implementation GPUImageSourceOverBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageSphereRefractionFilter.h b/GPUImage/Source/GPUImageSphereRefractionFilter.h new file mode 100644 index 0000000..cbbd2af --- /dev/null +++ b/GPUImage/Source/GPUImageSphereRefractionFilter.h @@ -0,0 +1,15 @@ +#import "GPUImageFilter.h" + +@interface GPUImageSphereRefractionFilter : GPUImageFilter +{ + GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform; +} + +/// The center about which to apply the distortion, with a default of (0.5, 0.5) +@property(readwrite, nonatomic) CGPoint center; +/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25 +@property(readwrite, nonatomic) CGFloat radius; +/// The index of refraction for the sphere, with a default of 0.71 +@property(readwrite, nonatomic) CGFloat refractiveIndex; + +@end diff --git a/GPUImage/Source/GPUImageSphereRefractionFilter.m b/GPUImage/Source/GPUImageSphereRefractionFilter.m new file mode 100644 index 0000000..b0f5404 --- /dev/null +++ b/GPUImage/Source/GPUImageSphereRefractionFilter.m @@ -0,0 +1,179 @@ +#import "GPUImageSphereRefractionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 center; + uniform highp float radius; + uniform highp float aspectRatio; + uniform highp float refractiveIndex; + + void main() + { + highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 
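+ // (y is rescaled about 0.5 by aspectRatio so distances are measured in a square
+ // space; refract() bends the view ray (0.0, 0.0, -1.0) around the reconstructed
+ // sphere normal, and the refracted xy is remapped from [-1, 1] to [0, 1] for the
+ // lookup. Outside the sphere, checkForPresenceWithinSphere zeroes the output.)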
* aspectRatio)); + highp float distanceFromCenter = distance(center, textureCoordinateToUse); + lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius); + + distanceFromCenter = distanceFromCenter / radius; + + highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); + highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); + + highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex); + + gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere; + } +); +#else +NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform vec2 center; + uniform float radius; + uniform float aspectRatio; + uniform float refractiveIndex; + + void main() + { + vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio)); + float distanceFromCenter = distance(center, textureCoordinateToUse); + float checkForPresenceWithinSphere = step(distanceFromCenter, radius); + + distanceFromCenter = distanceFromCenter / radius; + + float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); + vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); + + vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex); + + gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere; + } +); +#endif + +@interface GPUImageSphereRefractionFilter () + +- (void)adjustAspectRatio; + +@property (readwrite, nonatomic) CGFloat aspectRatio; + +@end + + +@implementation GPUImageSphereRefractionFilter + +@synthesize center = _center; +@synthesize radius = _radius; +@synthesize aspectRatio = _aspectRatio; +@synthesize refractiveIndex = _refractiveIndex; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageSphereRefractionFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + radiusUniform = [filterProgram uniformIndex:@"radius"]; + aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"]; + centerUniform = [filterProgram uniformIndex:@"center"]; + refractiveIndexUniform = [filterProgram uniformIndex:@"refractiveIndex"]; + + self.radius = 0.25; + self.center = CGPointMake(0.5, 0.5); + self.refractiveIndex = 0.71; + + [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0]; + + return self; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + CGSize oldInputSize = inputTextureSize; + [super setInputSize:newSize atIndex:textureIndex]; + + if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) ) + { + [self adjustAspectRatio]; + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)adjustAspectRatio; +{ + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)]; + } + else + { + [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)]; + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation 
atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; + [self adjustAspectRatio]; +} + +- (void)forceProcessingAtSize:(CGSize)frameSize; +{ + [super forceProcessingAtSize:frameSize]; + [self adjustAspectRatio]; +} + +- (void)setRadius:(CGFloat)newValue; +{ + _radius = newValue; + + [self setFloat:_radius forUniform:radiusUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +- (void)setAspectRatio:(CGFloat)newValue; +{ + _aspectRatio = newValue; + + [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram]; +} + +- (void)setRefractiveIndex:(CGFloat)newValue; +{ + _refractiveIndex = newValue; + + [self setFloat:_refractiveIndex forUniform:refractiveIndexUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageStillCamera.h b/GPUImage/Source/GPUImageStillCamera.h new file mode 100755 index 0000000..a86d936 --- /dev/null +++ b/GPUImage/Source/GPUImageStillCamera.h @@ -0,0 +1,21 @@ +#import "GPUImageVideoCamera.h" + +void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress); +void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer); + +@interface GPUImageStillCamera : GPUImageVideoCamera + +/** The JPEG compression quality to use when capturing a photo as a JPEG. + */ +@property CGFloat jpegCompressionQuality; + +// Only reliably set inside the context of the completion handler of one of the capture methods +@property (readonly) NSDictionary *currentCaptureMetadata; + +// Photography controls +- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block; +- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block; +- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block; +- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block; + +@end diff --git a/GPUImage/Source/GPUImageStillCamera.m b/GPUImage/Source/GPUImageStillCamera.m new file mode 100755 index 0000000..3de6939 --- /dev/null +++ b/GPUImage/Source/GPUImageStillCamera.m @@ -0,0 +1,290 @@ +// 2448x3264 pixel image = 31,961,088 bytes for uncompressed RGBA + +#import "GPUImageStillCamera.h" + +void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress) +{ + free((void *)baseAddress); +} + +void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer) +{ + // CVPixelBufferCreateWithPlanarBytes for YUV input + + CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame)); + + CVPixelBufferLockBaseAddress(cameraFrame, 0); + GLubyte *sourceImageBytes = CVPixelBufferGetBaseAddress(cameraFrame); + CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes, CVPixelBufferGetBytesPerRow(cameraFrame) * originalSize.height, NULL); + CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB(); + CGImageRef 
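+    // (Resize path: wrap the camera's BGRA bytes in a CGImage, redraw it into a
+    // CGBitmapContext at finalSize, then hand the scaled bytes to a new
+    // CVPixelBuffer; stillImageDataReleaseCallback frees them when that buffer is
+    // released.)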
CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width, (int)originalSize.height, 8, 32, CVPixelBufferGetBytesPerRow(cameraFrame), genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault); + + GLubyte *imageData = (GLubyte *) calloc(1, (int)finalSize.width * (int)finalSize.height * 4); + + CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)finalSize.width, (int)finalSize.height, 8, (int)finalSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); + CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, finalSize.width, finalSize.height), cgImageFromBytes); + CGImageRelease(cgImageFromBytes); + CGContextRelease(imageContext); + CGColorSpaceRelease(genericRGBColorspace); + CGDataProviderRelease(dataProvider); + + CVPixelBufferRef pixel_buffer = NULL; + CVPixelBufferCreateWithBytes(kCFAllocatorDefault, finalSize.width, finalSize.height, kCVPixelFormatType_32BGRA, imageData, finalSize.width * 4, stillImageDataReleaseCallback, NULL, NULL, &pixel_buffer); + CMVideoFormatDescriptionRef videoInfo = NULL; + CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixel_buffer, &videoInfo); + + CMTime frameTime = CMTimeMake(1, 30); + CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid}; + + CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer); + CFRelease(videoInfo); + CVPixelBufferRelease(pixel_buffer); +} + +@interface GPUImageStillCamera () +{ + AVCaptureStillImageOutput *photoOutput; +} + +// Methods calling this are responsible for calling dispatch_semaphore_signal(frameRenderingSemaphore) somewhere inside the block +- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block; + +@end + +@implementation GPUImageStillCamera { + BOOL requiresFrontCameraTextureCacheCorruptionWorkaround; +} + +@synthesize currentCaptureMetadata = _currentCaptureMetadata; +@synthesize jpegCompressionQuality = _jpegCompressionQuality; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition; +{ + if (!(self = [super initWithSessionPreset:sessionPreset cameraPosition:cameraPosition])) + { + return nil; + } + + /* Detect iOS version < 6 which requires a texture cache corruption workaround */ + requiresFrontCameraTextureCacheCorruptionWorkaround = [[[UIDevice currentDevice] systemVersion] compare:@"6.0" options:NSNumericSearch] == NSOrderedAscending; + + [self.captureSession beginConfiguration]; + + photoOutput = [[AVCaptureStillImageOutput alloc] init]; + + // Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device +// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload]) + if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + { + BOOL supportsFullYUVRange = NO; + NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes; + for (NSNumber *currentPixelFormat in supportedPixelFormats) + { + if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) + { + supportsFullYUVRange = YES; + } + } + + if (supportsFullYUVRange) + { + [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber
numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + else + { + [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + } + else + { + captureAsYUV = NO; + [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) +// { +// // TODO: Check for full range output and use that if available +// [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; +// } +// else +// { +// [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; +// } + + [self.captureSession addOutput:photoOutput]; + + [self.captureSession commitConfiguration]; + + self.jpegCompressionQuality = 0.8; + + return self; +} + +- (id)init; +{ + if (!(self = [self initWithSessionPreset:AVCaptureSessionPresetPhoto cameraPosition:AVCaptureDevicePositionBack])) + { + return nil; + } + return self; +} + +- (void)removeInputsAndOutputs; +{ + [self.captureSession removeOutput:photoOutput]; + [super removeInputsAndOutputs]; +} + +#pragma mark - +#pragma mark Photography controls + +- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block +{ + NSLog(@"If you want to use the method capturePhotoAsSampleBufferWithCompletionHandler:, you must comment out the line in GPUImageStillCamera.m in the method initWithSessionPreset:cameraPosition: which sets the CVPixelBufferPixelFormatTypeKey, as well as uncomment the rest of the method capturePhotoAsSampleBufferWithCompletionHandler:. 
However, if you do this you cannot use any of the photo capture methods to take a photo if you also supply a filter."); + + /*dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER); + + [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) { + block(imageSampleBuffer, error); + }]; + + dispatch_semaphore_signal(frameRenderingSemaphore); + + */ + + return; +} + +- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block; +{ + [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) { + UIImage *filteredPhoto = nil; + + if(!error){ + filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput]; + } + dispatch_semaphore_signal(frameRenderingSemaphore); + + block(filteredPhoto, error); + }]; +} + +- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block; +{ +// reportAvailableMemoryForGPUImage(@"Before Capture"); + + [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) { + NSData *dataForJPEGFile = nil; + + if(!error){ + @autoreleasepool { + UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput]; + dispatch_semaphore_signal(frameRenderingSemaphore); +// reportAvailableMemoryForGPUImage(@"After UIImage generation"); + + dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto,self.jpegCompressionQuality); +// reportAvailableMemoryForGPUImage(@"After JPEG generation"); + } + +// reportAvailableMemoryForGPUImage(@"After autorelease pool"); + }else{ + dispatch_semaphore_signal(frameRenderingSemaphore); + } + + block(dataForJPEGFile, error); + }]; +} + +- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block; +{ + + [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) { + NSData *dataForPNGFile = nil; + + if(!error){ + @autoreleasepool { + UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput]; + dispatch_semaphore_signal(frameRenderingSemaphore); + dataForPNGFile = UIImagePNGRepresentation(filteredPhoto); + } + }else{ + dispatch_semaphore_signal(frameRenderingSemaphore); + } + + block(dataForPNGFile, error); + }]; + + return; +} + +#pragma mark - Private Methods + +- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block +{ + dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER); + + if(photoOutput.isCapturingStillImage){ + block([NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorMaximumStillImageCaptureRequestsExceeded userInfo:nil]); + return; + } + + [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) { + if(imageSampleBuffer == NULL){ + block(error); + return; + } + + [self conserveMemoryForNextFrame]; + + // For now, resize photos to fit within the max texture size of the GPU + CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer); + + 
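// Many mobile GPUs of this era cap 2D textures at 2048 or 4096 pixels per side (device-dependent), so oversized stills are scaled down before being uploaded as textures. + 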
CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame)); + CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:sizeOfPhoto]; + if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU)) + { + CMSampleBufferRef sampleBuffer = NULL; + + if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) + { + NSAssert(NO, @"Error: no downsampling for YUV input in the framework yet"); + } + else + { + GPUImageCreateResizedSampleBuffer(cameraFrame, scaledImageSizeToFitOnGPU, &sampleBuffer); + } + + dispatch_semaphore_signal(frameRenderingSemaphore); + [self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]]; + dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER); + if (sampleBuffer != NULL) + CFRelease(sampleBuffer); + } + else + { + // This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches + AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position]; + if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageContext supportsFastTextureUpload]) || !requiresFrontCameraTextureCacheCorruptionWorkaround) + { + dispatch_semaphore_signal(frameRenderingSemaphore); + [self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]]; + dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER); + } + } + + CFDictionaryRef metadata = CMCopyDictionaryOfAttachments(NULL, imageSampleBuffer, kCMAttachmentMode_ShouldPropagate); + _currentCaptureMetadata = (__bridge_transfer NSDictionary *)metadata; + + block(nil); + + _currentCaptureMetadata = nil; + }]; +} + +@end diff --git a/GPUImage/Source/GPUImageStretchDistortionFilter.h b/GPUImage/Source/GPUImageStretchDistortionFilter.h new file mode 100755 index 0000000..0780309 --- /dev/null +++ b/GPUImage/Source/GPUImageStretchDistortionFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +/** Creates a stretch distortion of the image + */ +@interface GPUImageStretchDistortionFilter : GPUImageFilter { + GLint centerUniform; +} + +/** The center about which to apply the distortion, with a default of (0.5, 0.5) + */ +@property(readwrite, nonatomic) CGPoint center; + +@end diff --git a/GPUImage/Source/GPUImageStretchDistortionFilter.m b/GPUImage/Source/GPUImageStretchDistortionFilter.m new file mode 100755 index 0000000..d38cac3 --- /dev/null +++ b/GPUImage/Source/GPUImageStretchDistortionFilter.m @@ -0,0 +1,99 @@ +#import "GPUImageStretchDistortionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 center; + + void main() + { + highp vec2 normCoord = 2.0 * textureCoordinate - 1.0; + highp vec2 normCenter = 2.0 * center - 1.0; + + normCoord -= normCenter; + mediump vec2 s = sign(normCoord); + normCoord = abs(normCoord); + normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord; + normCoord = s * normCoord; + + normCoord += normCenter; + + mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse); + } +); +#else +NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + 
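+ // Same stretch mapping as the GL ES variant above: coordinates near the center are halved, magnifying that region, while smoothstep(0.25, 0.5, ...) eases back to an identity mapping toward the edges.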
+ uniform vec2 center; + + void main() + { + vec2 normCoord = 2.0 * textureCoordinate - 1.0; + vec2 normCenter = 2.0 * center - 1.0; + + normCoord -= normCenter; + vec2 s = sign(normCoord); + normCoord = abs(normCoord); + normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord; + normCoord = s * normCoord; + + normCoord += normCenter; + + vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5; + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse); + } +); +#endif + +@implementation GPUImageStretchDistortionFilter + +@synthesize center = _center; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageStretchDistortionFragmentShaderString])) + { + return nil; + } + + centerUniform = [filterProgram uniformIndex:@"center"]; + + self.center = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageSubtractBlendFilter.h b/GPUImage/Source/GPUImageSubtractBlendFilter.h new file mode 100755 index 0000000..8dee821 --- /dev/null +++ b/GPUImage/Source/GPUImageSubtractBlendFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter + +@end diff --git a/GPUImage/Source/GPUImageSubtractBlendFilter.m b/GPUImage/Source/GPUImageSubtractBlendFilter.m new file mode 100755 index 0000000..8938bae --- /dev/null +++ b/GPUImage/Source/GPUImageSubtractBlendFilter.m @@ -0,0 +1,52 @@ +#import "GPUImageSubtractBlendFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a); + } +); +#else +NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2); + + gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a); + } +); +#endif + +@implementation GPUImageSubtractBlendFilter + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end + diff --git a/GPUImage/Source/GPUImageSwirlFilter.h b/GPUImage/Source/GPUImageSwirlFilter.h new file mode 100755 index 0000000..ed7d012 --- /dev/null +++ b/GPUImage/Source/GPUImageSwirlFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilter.h" + +/** Creates a swirl 
distortion on the image + */ +@interface GPUImageSwirlFilter : GPUImageFilter +{ + GLint radiusUniform, centerUniform, angleUniform; +} + +/// The center about which to apply the distortion, with a default of (0.5, 0.5) +@property(readwrite, nonatomic) CGPoint center; +/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5 +@property(readwrite, nonatomic) CGFloat radius; +/// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0 +@property(readwrite, nonatomic) CGFloat angle; + +@end diff --git a/GPUImage/Source/GPUImageSwirlFilter.m b/GPUImage/Source/GPUImageSwirlFilter.m new file mode 100755 index 0000000..5462bc6 --- /dev/null +++ b/GPUImage/Source/GPUImageSwirlFilter.m @@ -0,0 +1,123 @@ +#import "GPUImageSwirlFilter.h" + +// Adapted from the shader example here: http://www.geeks3d.com/20110428/shader-library-swirl-post-processing-filter-in-glsl/ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 center; + uniform highp float radius; + uniform highp float angle; + + void main() + { + highp vec2 textureCoordinateToUse = textureCoordinate; + highp float dist = distance(center, textureCoordinate); + if (dist < radius) + { + textureCoordinateToUse -= center; + highp float percent = (radius - dist) / radius; + highp float theta = percent * percent * angle * 8.0; + highp float s = sin(theta); + highp float c = cos(theta); + textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c))); + textureCoordinateToUse += center; + } + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + + } +); +#else +NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform vec2 center; + uniform float radius; + uniform float angle; + + void main() + { + vec2 textureCoordinateToUse = textureCoordinate; + float dist = distance(center, textureCoordinate); + if (dist < radius) + { + textureCoordinateToUse -= center; + float percent = (radius - dist) / radius; + float theta = percent * percent * angle * 8.0; + float s = sin(theta); + float c = cos(theta); + textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c))); + textureCoordinateToUse += center; + } + + gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse ); + } +); +#endif + +@implementation GPUImageSwirlFilter + +@synthesize center = _center; +@synthesize radius = _radius; +@synthesize angle = _angle; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageSwirlFragmentShaderString])) + { + return nil; + } + + radiusUniform = [filterProgram uniformIndex:@"radius"]; + angleUniform = [filterProgram uniformIndex:@"angle"]; + centerUniform = [filterProgram uniformIndex:@"center"]; + + self.radius = 0.5; + self.angle = 1.0; + self.center = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setCenter:self.center]; +} + +- (void)setRadius:(CGFloat)newValue; +{ + _radius = newValue; + + [self setFloat:_radius 
forUniform:radiusUniform program:filterProgram]; +} + +- (void)setAngle:(CGFloat)newValue; +{ + _angle = newValue; + + [self setFloat:_angle forUniform:angleUniform program:filterProgram]; +} + +- (void)setCenter:(CGPoint)newValue; +{ + _center = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageTextureInput.h b/GPUImage/Source/GPUImageTextureInput.h new file mode 100755 index 0000000..8190305 --- /dev/null +++ b/GPUImage/Source/GPUImageTextureInput.h @@ -0,0 +1,14 @@ +#import "GPUImageOutput.h" + +@interface GPUImageTextureInput : GPUImageOutput +{ + CGSize textureSize; +} + +// Initialization and teardown +- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize; + +// Image rendering +- (void)processTextureWithFrameTime:(CMTime)frameTime; + +@end diff --git a/GPUImage/Source/GPUImageTextureInput.m b/GPUImage/Source/GPUImageTextureInput.m new file mode 100755 index 0000000..d43b379 --- /dev/null +++ b/GPUImage/Source/GPUImageTextureInput.m @@ -0,0 +1,44 @@ +#import "GPUImageTextureInput.h" + +@implementation GPUImageTextureInput + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize; +{ + if (!(self = [super init])) + { + return nil; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + [self deleteOutputTexture]; + }); + + outputTexture = newInputTexture; + textureSize = newTextureSize; + + return self; +} + +#pragma mark - +#pragma mark Image rendering + +- (void)processTextureWithFrameTime:(CMTime)frameTime; +{ + runAsynchronouslyOnVideoProcessingQueue(^{ + for (id<GPUImageInput> currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:textureSize atIndex:targetTextureIndex]; + [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex]; + } + }); +} + +@end diff --git a/GPUImage/Source/GPUImageTextureOutput.h b/GPUImage/Source/GPUImageTextureOutput.h new file mode 100755 index 0000000..af8442f --- /dev/null +++ b/GPUImage/Source/GPUImageTextureOutput.h @@ -0,0 +1,19 @@ +#import <Foundation/Foundation.h> +#import "GPUImageContext.h" + +@protocol GPUImageTextureOutputDelegate; + +@interface GPUImageTextureOutput : NSObject <GPUImageInput> +{ + __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate; +} + +@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate; +@property(readonly) GLuint texture; +@property(nonatomic) BOOL enabled; + +@end + +@protocol GPUImageTextureOutputDelegate +- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput; +@end diff --git a/GPUImage/Source/GPUImageTextureOutput.m b/GPUImage/Source/GPUImageTextureOutput.m new file mode 100755 index 0000000..ffa7e4f --- /dev/null +++ b/GPUImage/Source/GPUImageTextureOutput.m @@ -0,0 +1,84 @@ +#import "GPUImageTextureOutput.h" + +@implementation GPUImageTextureOutput + +@synthesize delegate = _delegate; +@synthesize texture = _texture; +@synthesize enabled; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + self.enabled = YES; + + return self; +} + +#pragma mark - +#pragma mark GPUImageInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + 
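// A new frame has just been rendered into the texture handed to this output; notify the delegate, which is expected to read from the texture property inside this callback. + 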
[_delegate newFrameReadyFromTextureOutput:self]; +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + _texture = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ +} + +- (CGSize)maximumOutputSize; +{ + return CGSizeZero; +} + +- (void)endProcessing +{ +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +@end diff --git a/GPUImage/Source/GPUImageThreeInputFilter.h b/GPUImage/Source/GPUImageThreeInputFilter.h new file mode 100644 index 0000000..d0e7178 --- /dev/null +++ b/GPUImage/Source/GPUImageThreeInputFilter.h @@ -0,0 +1,21 @@ +#import "GPUImageTwoInputFilter.h" + +extern NSString *const kGPUImageThreeInputTextureVertexShaderString; + +@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter +{ + GLint filterThirdTextureCoordinateAttribute; + GLint filterInputTextureUniform3; + GPUImageRotationMode inputRotation3; + GLuint filterSourceTexture3; + CMTime thirdFrameTime; + + BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo; + BOOL thirdFrameCheckDisabled; + + __unsafe_unretained id<GPUImageTextureDelegate> thirdTextureDelegate; +} + +- (void)disableThirdFrameCheck; + +@end diff --git a/GPUImage/Source/GPUImageThreeInputFilter.m b/GPUImage/Source/GPUImageThreeInputFilter.m new file mode 100644 index 0000000..db5ccd8 --- /dev/null +++ b/GPUImage/Source/GPUImageThreeInputFilter.m @@ -0,0 +1,341 @@ +#import "GPUImageThreeInputFilter.h" + + +NSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + attribute vec4 inputTextureCoordinate2; + attribute vec4 inputTextureCoordinate3; + + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + varying vec2 textureCoordinate3; + + void main() + { + gl_Position = position; + textureCoordinate = inputTextureCoordinate.xy; + textureCoordinate2 = inputTextureCoordinate2.xy; + textureCoordinate3 = inputTextureCoordinate3.xy; + } +); + +@implementation GPUImageThreeInputFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + inputRotation3 = kGPUImageNoRotation; + + hasSetSecondTexture = NO; + + hasReceivedThirdFrame = NO; + thirdFrameWasVideo = NO; + thirdFrameCheckDisabled = NO; + + thirdFrameTime = kCMTimeInvalid; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate3"]; + + filterInputTextureUniform3 = 
[filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader + glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute); + }); + + return self; +} + +- (void)initializeAttributes; +{ + [super initializeAttributes]; + [filterProgram addAttribute:@"inputTextureCoordinate3"]; +} + +- (void)disableThirdFrameCheck; +{ + thirdFrameCheckDisabled = YES; +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + [GPUImageContext setActiveShaderProgram:filterProgram]; + [self setFilterFBO]; + [self setUniformsForProgramAtIndex:0]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + glUniform1i(filterInputTextureUniform, 2); + + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, filterSourceTexture2); + glUniform1i(filterInputTextureUniform2, 3); + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, filterSourceTexture3); + glUniform1i(filterInputTextureUniform3, 4); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]); + glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)releaseInputTexturesIfNeeded; +{ + if (shouldConserveMemoryForNextFrame) + { + [firstTextureDelegate textureNoLongerNeededForTarget:self]; + [secondTextureDelegate textureNoLongerNeededForTarget:self]; + [thirdTextureDelegate textureNoLongerNeededForTarget:self]; + shouldConserveMemoryForNextFrame = NO; + } +} + +#pragma mark - +#pragma mark GPUImageInput + +- (NSInteger)nextAvailableTextureIndex; +{ + if (hasSetSecondTexture) + { + return 2; + } + else if (hasSetFirstTexture) + { + return 1; + } + else + { + return 0; + } +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + filterSourceTexture = newInputTexture; + hasSetFirstTexture = YES; + } + else if (textureIndex == 1) + { + filterSourceTexture2 = newInputTexture; + hasSetSecondTexture = YES; + } + else + { + filterSourceTexture3 = newInputTexture; + } +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + [super setInputSize:newSize atIndex:textureIndex]; + + if (CGSizeEqualToSize(newSize, CGSizeZero)) + { + hasSetFirstTexture = NO; + } + } + else if (textureIndex == 1) + { + if (CGSizeEqualToSize(newSize, CGSizeZero)) + { + hasSetSecondTexture = NO; + } + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + inputRotation = newInputRotation; + } + else if (textureIndex == 1) + { + inputRotation2 = newInputRotation; + } + else + { + inputRotation3 = newInputRotation; + } +} + +- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex; +{ + CGSize rotatedSize = 
sizeToRotate; + + GPUImageRotationMode rotationToCheck; + if (textureIndex == 0) + { + rotationToCheck = inputRotation; + } + else if (textureIndex == 1) + { + rotationToCheck = inputRotation2; + } + else + { + rotationToCheck = inputRotation3; + } + + if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck)) + { + rotatedSize.width = sizeToRotate.height; + rotatedSize.height = sizeToRotate.width; + } + + return rotatedSize; +} + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + // You can set up infinite update loops, so this helps to short-circuit them + if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) + { + return; + } + + BOOL updatedMovieFrameOppositeStillImage = NO; + + if (textureIndex == 0) + { + hasReceivedFirstFrame = YES; + firstFrameTime = frameTime; + if (secondFrameCheckDisabled) + { + hasReceivedSecondFrame = YES; + } + if (thirdFrameCheckDisabled) + { + hasReceivedThirdFrame = YES; + } + + if (!CMTIME_IS_INDEFINITE(frameTime)) + { + if (CMTIME_IS_INDEFINITE(secondFrameTime)) + { + updatedMovieFrameOppositeStillImage = YES; + } + } + } + else if (textureIndex == 1) + { + hasReceivedSecondFrame = YES; + secondFrameTime = frameTime; + if (firstFrameCheckDisabled) + { + hasReceivedFirstFrame = YES; + } + if (thirdFrameCheckDisabled) + { + hasReceivedThirdFrame = YES; + } + + if (!CMTIME_IS_INDEFINITE(frameTime)) + { + if (CMTIME_IS_INDEFINITE(firstFrameTime)) + { + updatedMovieFrameOppositeStillImage = YES; + } + } + } + else + { + hasReceivedThirdFrame = YES; + thirdFrameTime = frameTime; + if (firstFrameCheckDisabled) + { + hasReceivedFirstFrame = YES; + } + if (secondFrameCheckDisabled) + { + hasReceivedSecondFrame = YES; + } + + if (!CMTIME_IS_INDEFINITE(frameTime)) + { + if (CMTIME_IS_INDEFINITE(firstFrameTime)) + { + updatedMovieFrameOppositeStillImage = YES; + } + } + } + + // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled) + if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage) + { + outputTextureRetainCount = [targets count]; + + static const GLfloat imageVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + + [self informTargetsAboutNewFrameAtTime:frameTime]; + + hasReceivedFirstFrame = NO; + hasReceivedSecondFrame = NO; + hasReceivedThirdFrame = NO; + } +} + +- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + firstTextureDelegate = newTextureDelegate; + } + else if (textureIndex == 1) + { + secondTextureDelegate = newTextureDelegate; + } + else + { + thirdTextureDelegate = newTextureDelegate; + } +} + + +@end diff --git a/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.h b/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.h new file mode 100755 index 0000000..2036030 --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.h @@ -0,0 +1,12 @@ +#import "GPUImageSobelEdgeDetectionFilter.h" + +@interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter +{ + GLint thresholdUniform; +} + 
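+// A rough usage sketch (variable names hypothetical): +// GPUImageThresholdEdgeDetectionFilter *edgeFilter = [[GPUImageThresholdEdgeDetectionFilter alloc] init]; +// edgeFilter.threshold = 0.4; // raise to keep only the strongest edges +// [videoCamera addTarget:edgeFilter]; + 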
+/** Any edge above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.25 as the default + */ +@property(readwrite, nonatomic) CGFloat threshold; + +@end diff --git a/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.m b/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.m new file mode 100755 index 0000000..553c600 --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdEdgeDetectionFilter.m @@ -0,0 +1,145 @@ +#import "GPUImageThresholdEdgeDetectionFilter.h" + +@implementation GPUImageThresholdEdgeDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float threshold; + + uniform float edgeStrength; + + void main() + { +// float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; +// float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; +// float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; +// float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r; +// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; +// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; +// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + leftIntensity + 2.0 * centerIntensity + rightIntensity; +// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomIntensity + 2.0 * centerIntensity + topIntensity; + float h = (centerIntensity - topIntensity) + (bottomIntensity - centerIntensity); + float v = (centerIntensity - leftIntensity) + (rightIntensity - centerIntensity); +// float h = (centerIntensity - topIntensity); +// float j = (topIntensity - centerIntensity); +// h = max(h,j); +// j = abs(h); +// float v = (centerIntensity - leftIntensity); + + float mag = length(vec2(h, v)) * edgeStrength; + mag = step(threshold, mag); + +// float mag = abs(h); + +// gl_FragColor = vec4(h, h, h, 1.0); +// gl_FragColor = vec4(texture2D(inputImageTexture, textureCoordinate)); +// gl_FragColor = vec4(h, centerIntensity, j, 1.0); + gl_FragColor = vec4(mag, mag, mag, 1.0); + } +); +#else +NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 
bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float threshold; + + uniform float edgeStrength; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + h = max(0.0, h); + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + v = max(0.0, v); + + float mag = length(vec2(h, v)) * edgeStrength; + mag = step(threshold, mag); + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +@synthesize threshold = _threshold; + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + thresholdUniform = [secondFilterProgram uniformIndex:@"threshold"]; + self.threshold = 0.25; + self.edgeStrength = 1.0; + + return self; +} + + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdEdgeDetectionFragmentShaderString])) + { + return nil; + } + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setThreshold:(CGFloat)newValue; +{ + _threshold = newValue; + + [self setFloat:_threshold forUniform:thresholdUniform program:secondFilterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageThresholdSketchFilter.h b/GPUImage/Source/GPUImageThresholdSketchFilter.h new file mode 100644 index 0000000..fda5897 --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdSketchFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageThresholdEdgeDetectionFilter.h" + +@interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter + +@end diff --git a/GPUImage/Source/GPUImageThresholdSketchFilter.m b/GPUImage/Source/GPUImageThresholdSketchFilter.m new file mode 100644 index 0000000..d24e9de --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdSketchFilter.m @@ -0,0 +1,103 @@ +#import "GPUImageThresholdSketchFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform lowp float threshold; + uniform float edgeStrength; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + float bottomLeftIntensity = 
texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = (length(vec2(h, v)) * edgeStrength); + mag = step(threshold, mag); + mag = 1.0 - mag; + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#else +NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + uniform float threshold; + uniform float edgeStrength; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = 1.0 - length(vec2(h, v) * edgeStrength); + mag = step(threshold, mag); + + gl_FragColor = vec4(vec3(mag), 1.0); + } +); +#endif + +@implementation GPUImageThresholdSketchFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdSketchFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h b/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h new file mode 100644 index 0000000..9c6e5d7 --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h @@ -0,0 +1,14 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +@interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter +{ + GLint thresholdUniform; +} + +/** Any 
local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.9 as the default + */ +@property(readwrite, nonatomic) CGFloat threshold; + +- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace; + +@end diff --git a/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m b/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m new file mode 100644 index 0000000..883bba9 --- /dev/null +++ b/GPUImage/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m @@ -0,0 +1,264 @@ +#import "GPUImageThresholdedNonMaximumSuppressionFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying highp vec2 textureCoordinate; + varying highp vec2 leftTextureCoordinate; + varying highp vec2 rightTextureCoordinate; + + varying highp vec2 topTextureCoordinate; + varying highp vec2 topLeftTextureCoordinate; + varying highp vec2 topRightTextureCoordinate; + + varying highp vec2 bottomTextureCoordinate; + varying highp vec2 bottomLeftTextureCoordinate; + varying highp vec2 bottomRightTextureCoordinate; + + uniform lowp float threshold; + + void main() + { + lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r; + lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r; + lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r; + lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r; + lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r; + lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + + // Use a tiebreaker for pixels to the left and immediately above this one + lowp float multiplier = 1.0 - step(centerColor.r, topColor); + multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, leftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor)); + + lowp float maxValue = max(centerColor.r, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + lowp float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier; + finalValue = step(threshold, finalValue); + + gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0); +// +// gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0); + } +); + +NSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying highp vec2 textureCoordinate; + varying highp vec2 leftTextureCoordinate; + varying highp vec2 rightTextureCoordinate; + + varying highp vec2 topTextureCoordinate; + varying highp vec2 topLeftTextureCoordinate; + varying highp vec2 topRightTextureCoordinate; + + varying highp vec2 bottomTextureCoordinate; + varying highp vec2 bottomLeftTextureCoordinate; + varying highp vec2 bottomRightTextureCoordinate; + + uniform lowp float threshold; + + 
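// Decode the intensity that an upstream filter packed across the blue/green/red channels (blue carries the high bits, red the low) back into a single float for the neighborhood comparisons below. + 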
highp float encodedIntensity(highp vec3 sourceColor) + { + return (sourceColor.b * 256.0 + sourceColor.g + sourceColor.r / 256.0); + } + + void main() + { + highp float bottomColor = encodedIntensity(texture2D(inputImageTexture, bottomTextureCoordinate).rgb); + highp float bottomLeftColor = encodedIntensity(texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb); + highp float bottomRightColor = encodedIntensity(texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb); + highp float centerColor = encodedIntensity(texture2D(inputImageTexture, textureCoordinate).rgb); + highp float leftColor = encodedIntensity(texture2D(inputImageTexture, leftTextureCoordinate).rgb); + highp float rightColor = encodedIntensity(texture2D(inputImageTexture, rightTextureCoordinate).rgb); + highp float topColor = encodedIntensity(texture2D(inputImageTexture, topTextureCoordinate).rgb); + highp float topRightColor = encodedIntensity(texture2D(inputImageTexture, topRightTextureCoordinate).rgb); + highp float topLeftColor = encodedIntensity(texture2D(inputImageTexture, topLeftTextureCoordinate).rgb); + + // Use a tiebreaker for pixels to the left and immediately above this one + highp float multiplier = 1.0 - step(centerColor, topColor); + multiplier = multiplier * (1.0 - step(centerColor, topLeftColor)); + multiplier = multiplier * (1.0 - step(centerColor, leftColor)); + multiplier = multiplier * (1.0 - step(centerColor, bottomLeftColor)); + + highp float maxValue = max(centerColor, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + highp float finalValue = centerColor * step(maxValue, centerColor) * multiplier; +// highp float finalValue = step(maxValue, centerColor) * multiplier; + finalValue = step(threshold, finalValue); + + gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0); +// gl_FragColor = vec4(finalValue, centerColor, centerColor, 1.0); +// gl_FragColor = vec4(vec3(centerColor), 1.0); + // + // gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0); + } +); +#else +NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform float threshold; + + void main() + { + float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r; + float topColor = texture2D(inputImageTexture, topTextureCoordinate).r; + float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + + // Use a tiebreaker for pixels to the left and immediately above this one + float multiplier = 1.0 - step(centerColor.r, topColor); + multiplier = multiplier * (1.0 - step(centerColor.r, 
topLeftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, leftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor)); + + float maxValue = max(centerColor.r, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier; + finalValue = step(threshold, finalValue); + + gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0); + // + // gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0); + } +); + +NSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform float threshold; + + void main() + { + float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + vec4 centerColor = texture2D(inputImageTexture, textureCoordinate); + float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r; + float topColor = texture2D(inputImageTexture, topTextureCoordinate).r; + float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + + // Use a tiebreaker for pixels to the left and immediately above this one + float multiplier = 1.0 - step(centerColor.r, topColor); + multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, leftColor)); + multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor)); + + float maxValue = max(centerColor.r, bottomColor); + maxValue = max(maxValue, bottomRightColor); + maxValue = max(maxValue, rightColor); + maxValue = max(maxValue, topRightColor); + + float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier; + finalValue = step(threshold, finalValue); + + gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0); + // + // gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0); + } +); +#endif + +@implementation GPUImageThresholdedNonMaximumSuppressionFilter + +@synthesize threshold = _threshold; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithPackedColorspace:NO])) + { + return nil; + } + + return self; +} + +- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace; +{ + NSString *shaderString; + if (inputUsesPackedColorspace) + { + shaderString = kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString; + } + else + { + shaderString = kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString; + } + + if (!(self = [super initWithFragmentShaderFromString:shaderString])) + { + return nil; + } + + 
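// Look up the uniform location once at initialization; setThreshold: reuses the cached index for every subsequent update. + 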
thresholdUniform = [filterProgram uniformIndex:@"threshold"]; + self.threshold = 0.9; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setThreshold:(CGFloat)newValue; +{ + _threshold = newValue; + + [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageTiltShiftFilter.h b/GPUImage/Source/GPUImageTiltShiftFilter.h new file mode 100755 index 0000000..e41adee --- /dev/null +++ b/GPUImage/Source/GPUImageTiltShiftFilter.h @@ -0,0 +1,24 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGaussianBlurFilter; + +/// A simulated tilt-shift lens effect +@interface GPUImageTiltShiftFilter : GPUImageFilterGroup +{ + GPUImageGaussianBlurFilter *blurFilter; + GPUImageFilter *tiltShiftFilter; +} + +/// The radius of the underlying blur, in pixels. This is 7.0 by default. +@property(readwrite, nonatomic) CGFloat blurRadiusInPixels; + +/// The normalized location of the top of the in-focus area in the image. This should be lower than bottomFocusLevel; the default is 0.4 +@property(readwrite, nonatomic) CGFloat topFocusLevel; + +/// The normalized location of the bottom of the in-focus area in the image. This should be higher than topFocusLevel; the default is 0.6 +@property(readwrite, nonatomic) CGFloat bottomFocusLevel; + +/// The rate at which the image gets blurry away from the in-focus region; the default is 0.2 +@property(readwrite, nonatomic) CGFloat focusFallOffRate; + +@end diff --git a/GPUImage/Source/GPUImageTiltShiftFilter.m b/GPUImage/Source/GPUImageTiltShiftFilter.m new file mode 100755 index 0000000..e2f632b --- /dev/null +++ b/GPUImage/Source/GPUImageTiltShiftFilter.m @@ -0,0 +1,126 @@ +#import "GPUImageTiltShiftFilter.h" +#import "GPUImageFilter.h" +#import "GPUImageTwoInputFilter.h" +#import "GPUImageGaussianBlurFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform highp float topFocusLevel; + uniform highp float bottomFocusLevel; + uniform highp float focusFallOffRate; + + void main() + { + lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2); + + lowp float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y); + blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y); + + gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity); + } +); +#else +NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float topFocusLevel; + uniform float bottomFocusLevel; + uniform float focusFallOffRate; + + void main() + { + vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2); + + float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y); + blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y); + + gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity); + } +); +#endif + 
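+// The group below wires two passes together: a Gaussian blur of the source image, then a two-input blend that mixes sharp and blurred pixels by vertical position using the smoothstep falloff in the shaders above. +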
GPUImageTiltShiftFilter
+
+@synthesize blurRadiusInPixels;
+@synthesize topFocusLevel = _topFocusLevel;
+@synthesize bottomFocusLevel = _bottomFocusLevel;
+@synthesize focusFallOffRate = _focusFallOffRate;
+
+- (id)init;
+{
+    if (!(self = [super init]))
+    {
+        return nil;
+    }
+    
+    // First pass: apply a variable Gaussian blur
+    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
+    [self addFilter:blurFilter];
+    
+    // Second pass: combine the blurred image with the original sharp one
+    tiltShiftFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageTiltShiftFragmentShaderString];
+    [self addFilter:tiltShiftFilter];
+    
+    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
+    [blurFilter addTarget:tiltShiftFilter atTextureLocation:1];
+    
+    // To prevent double updating of this filter, disable updates from the sharp image side
+//    self.inputFilterToIgnoreForUpdates = tiltShiftFilter;
+    
+    self.initialFilters = [NSArray arrayWithObjects:blurFilter, tiltShiftFilter, nil];
+    self.terminalFilter = tiltShiftFilter;
+    
+    self.topFocusLevel = 0.4;
+    self.bottomFocusLevel = 0.6;
+    self.focusFallOffRate = 0.2;
+    self.blurRadiusInPixels = 7.0;
+    
+    return self;
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (void)setBlurRadiusInPixels:(CGFloat)newValue;
+{
+    blurFilter.blurRadiusInPixels = newValue;
+}
+
+- (CGFloat)blurRadiusInPixels;
+{
+    return blurFilter.blurRadiusInPixels;
+}
+
+- (void)setTopFocusLevel:(CGFloat)newValue;
+{
+    _topFocusLevel = newValue;
+    [tiltShiftFilter setFloat:newValue forUniformName:@"topFocusLevel"];
+}
+
+- (void)setBottomFocusLevel:(CGFloat)newValue;
+{
+    _bottomFocusLevel = newValue;
+    [tiltShiftFilter setFloat:newValue forUniformName:@"bottomFocusLevel"];
+}
+
+- (void)setFocusFallOffRate:(CGFloat)newValue;
+{
+    _focusFallOffRate = newValue;
+    [tiltShiftFilter setFloat:newValue forUniformName:@"focusFallOffRate"];
+}
+
+@end
\ No newline at end of file
diff --git a/GPUImage/Source/GPUImageToneCurveFilter.h b/GPUImage/Source/GPUImageToneCurveFilter.h
new file mode 100755
index 0000000..ff4ae92
--- /dev/null
+++ b/GPUImage/Source/GPUImageToneCurveFilter.h
@@ -0,0 +1,30 @@
+#import "GPUImageFilter.h"
+
+@interface GPUImageToneCurveFilter : GPUImageFilter
+
+@property(readwrite, nonatomic, copy) NSArray *redControlPoints;
+@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
+@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
+@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
+
+// Initialization and teardown
+- (id)initWithACVData:(NSData*)data;
+
+- (id)initWithACV:(NSString*)curveFilename;
+- (id)initWithACVURL:(NSURL*)curveFileURL;
+
+// This lets you set all three red, green, and blue tone curves at once.
+// NOTE: This method is deprecated because the same effect can be accomplished
+// using the rgbComposite channel rather than setting all 3 R, G, and B channels.
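+// For example, a gentle S-curve contrast boost can go through the composite
+// channel alone (a usage sketch; the NSValue/CGPoint wrapping assumes the
+// iOS target):
+//
+//    GPUImageToneCurveFilter *toneCurveFilter = [[GPUImageToneCurveFilter alloc] init];
+//    toneCurveFilter.rgbCompositeControlPoints = [NSArray arrayWithObjects:
+//        [NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)],
+//        [NSValue valueWithCGPoint:CGPointMake(0.25, 0.15)],
+//        [NSValue valueWithCGPoint:CGPointMake(0.75, 0.85)],
+//        [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)],
+//        nil];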
+- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE; + +- (void)setPointsWithACV:(NSString*)curveFilename; +- (void)setPointsWithACVURL:(NSURL*)curveFileURL; + +// Curve calculation +- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points; +- (NSMutableArray *)splineCurve:(NSArray *)points; +- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints; +- (void)updateToneCurveTexture; + +@end diff --git a/GPUImage/Source/GPUImageToneCurveFilter.m b/GPUImage/Source/GPUImageToneCurveFilter.m new file mode 100644 index 0000000..391cd90 --- /dev/null +++ b/GPUImage/Source/GPUImageToneCurveFilter.m @@ -0,0 +1,610 @@ +#import "GPUImageToneCurveFilter.h" + +#pragma mark - +#pragma mark GPUImageACVFile Helper + +// GPUImageACVFile +// +// ACV File format Parser +// Please refer to http://www.adobe.com/devnet-apps/photoshop/fileformatashtml/PhotoshopFileFormats.htm#50577411_pgfId-1056330 +// + +@interface GPUImageACVFile : NSObject{ + short version; + short totalCurves; + + NSArray *rgbCompositeCurvePoints; + NSArray *redCurvePoints; + NSArray *greenCurvePoints; + NSArray *blueCurvePoints; +} + +@property(strong,nonatomic) NSArray *rgbCompositeCurvePoints; +@property(strong,nonatomic) NSArray *redCurvePoints; +@property(strong,nonatomic) NSArray *greenCurvePoints; +@property(strong,nonatomic) NSArray *blueCurvePoints; + +- (id) initWithACVFileData:(NSData*)data; + + +unsigned short int16WithBytes(Byte* bytes); +@end + +@implementation GPUImageACVFile + +@synthesize rgbCompositeCurvePoints, redCurvePoints, greenCurvePoints, blueCurvePoints; + +- (id) initWithACVFileData:(NSData *)data { + self = [super init]; + if (self != nil) + { + if (data.length == 0) + { + NSLog(@"failed to init ACVFile with data:%@", data); + + return self; + } + + Byte* rawBytes = (Byte*) [data bytes]; + version = int16WithBytes(rawBytes); + rawBytes+=2; + + totalCurves = int16WithBytes(rawBytes); + rawBytes+=2; + + NSMutableArray *curves = [NSMutableArray new]; + + float pointRate = (1.0 / 255); + // The following is the data for each curve specified by count above + for (NSInteger x = 0; x 0) + { + // Sort the array. + NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b) { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + float x1 = [(NSValue *)a CGPointValue].x; + float x2 = [(NSValue *)b CGPointValue].x; +#else + float x1 = [(NSValue *)a pointValue].x; + float x2 = [(NSValue *)b pointValue].x; +#endif + return x1 > x2; + }]; + + // Convert from (0, 1) to (0, 255). + NSMutableArray *convertedPoints = [NSMutableArray arrayWithCapacity:[sortedPoints count]]; + for (int i=0; i<[points count]; i++){ +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGPoint point = [[sortedPoints objectAtIndex:i] CGPointValue]; +#else + NSPoint point = [[sortedPoints objectAtIndex:i] pointValue]; +#endif + point.x = point.x * 255; + point.y = point.y * 255; + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [convertedPoints addObject:[NSValue valueWithCGPoint:point]]; +#else + [convertedPoints addObject:[NSValue valueWithPoint:point]]; +#endif + } + + + NSMutableArray *splinePoints = [self splineCurve:convertedPoints]; + + // If we have a first point like (0.3, 0) we'll be missing some points at the beginning + // that should be 0. 
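+    // Worked example: a first control point of (0.3, 0) lands near x = 76 once
+    // scaled to the 0-255 range, so indices 0 through 76 of the spline would be
+    // undefined; the loop below back-fills them with zero-valued points so the
+    // finished curve covers all 256 input levels.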
+#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGPoint firstSplinePoint = [[splinePoints objectAtIndex:0] CGPointValue]; +#else + NSPoint firstSplinePoint = [[splinePoints objectAtIndex:0] pointValue]; +#endif + + if (firstSplinePoint.x > 0) { + for (int i=firstSplinePoint.x; i >= 0; i--) { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGPoint newCGPoint = CGPointMake(i, 0); + [splinePoints insertObject:[NSValue valueWithCGPoint:newCGPoint] atIndex:0]; +#else + NSPoint newNSPoint = NSMakePoint(i, 0); + [splinePoints insertObject:[NSValue valueWithPoint:newNSPoint] atIndex:0]; +#endif + } + } + + // Insert points similarly at the end, if necessary. +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGPoint lastSplinePoint = [[splinePoints lastObject] CGPointValue]; + + if (lastSplinePoint.x < 255) { + for (int i = lastSplinePoint.x + 1; i <= 255; i++) { + CGPoint newCGPoint = CGPointMake(i, 255); + [splinePoints addObject:[NSValue valueWithCGPoint:newCGPoint]]; + } + } +#else + NSPoint lastSplinePoint = [[splinePoints lastObject] pointValue]; + + if (lastSplinePoint.x < 255) { + for (int i = lastSplinePoint.x + 1; i <= 255; i++) { + NSPoint newNSPoint = NSMakePoint(i, 255); + [splinePoints addObject:[NSValue valueWithPoint:newNSPoint]]; + } + } +#endif + + // Prepare the spline points. + NSMutableArray *preparedSplinePoints = [NSMutableArray arrayWithCapacity:[splinePoints count]]; + for (int i=0; i<[splinePoints count]; i++) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + CGPoint newPoint = [[splinePoints objectAtIndex:i] CGPointValue]; +#else + NSPoint newPoint = [[splinePoints objectAtIndex:i] pointValue]; +#endif + CGPoint origPoint = CGPointMake(newPoint.x, newPoint.x); + + float distance = sqrt(pow((origPoint.x - newPoint.x), 2.0) + pow((origPoint.y - newPoint.y), 2.0)); + + if (origPoint.y > newPoint.y) + { + distance = -distance; + } + + [preparedSplinePoints addObject:[NSNumber numberWithFloat:distance]]; + } + + return preparedSplinePoints; + } + + return nil; +} + + +- (NSMutableArray *)splineCurve:(NSArray *)points +{ + NSMutableArray *sdA = [self secondDerivative:points]; + + // [points count] is equal to [sdA count] + NSInteger n = [sdA count]; + if (n < 1) + { + return nil; + } + double sd[n]; + + // From NSMutableArray to sd[n]; + for (int i=0; i 255.0) + { + y = 255.0; + } + else if (y < 0.0) + { + y = 0.0; + } +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + [output addObject:[NSValue valueWithCGPoint:CGPointMake(x, y)]]; +#else + [output addObject:[NSValue valueWithPoint:NSMakePoint(x, y)]]; +#endif + } + } + + // The above always misses the last point because the last point is the last next, so we approach but don't equal it. + [output addObject:[points lastObject]]; + return output; +} + +- (NSMutableArray *)secondDerivative:(NSArray *)points +{ + const NSInteger n = [points count]; + if ((n <= 0) || (n == 1)) + { + return nil; + } + + double matrix[n][3]; + double result[n]; + matrix[0][1]=1; + // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.) 
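+    // Row 0 of the system is (0, 1, 0) with a zero right-hand side (result[0]
+    // is assumed 0, per the note above), which pins the second derivative at
+    // the first knot to zero; the last knot is handled the same way. These are
+    // the standard "natural" cubic spline boundary conditions, and they make
+    // the tridiagonal solve below uniquely determined.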
+ matrix[0][0]=0; + matrix[0][2]=0; + + for(int i=1;idown) + for(int i=1;iup) + for(NSInteger i=n-2;i>=0;i--) + { + double k = matrix[i][2]/matrix[i+1][1]; + matrix[i][1] -= k*matrix[i+1][0]; + matrix[i][2] = 0; + result[i] -= k*result[i+1]; + } + + double y2[n]; + for(int i=0;i= 256) && ([_greenCurve count] >= 256) && ([_blueCurve count] >= 256) && ([_rgbCompositeCurve count] >= 256)) + { + for (unsigned int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++) + { + // BGRA for upload to texture + GLubyte b = fmin(fmax(currentCurveIndex + [[_blueCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255); + toneCurveByteArray[currentCurveIndex * 4] = fmin(fmax(b + [[_rgbCompositeCurve objectAtIndex:b] floatValue], 0), 255); + GLubyte g = fmin(fmax(currentCurveIndex + [[_greenCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255); + toneCurveByteArray[currentCurveIndex * 4 + 1] = fmin(fmax(g + [[_rgbCompositeCurve objectAtIndex:g] floatValue], 0), 255); + GLubyte r = fmin(fmax(currentCurveIndex + [[_redCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255); + toneCurveByteArray[currentCurveIndex * 4 + 2] = fmin(fmax(r + [[_rgbCompositeCurve objectAtIndex:r] floatValue], 0), 255); + toneCurveByteArray[currentCurveIndex * 4 + 3] = 255; + } + + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GL_BGRA, GL_UNSIGNED_BYTE, toneCurveByteArray); + } + }); +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + [GPUImageContext setActiveShaderProgram:filterProgram]; + [self setFilterFBO]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + glUniform1i(filterInputTextureUniform, 2); + + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, toneCurveTexture); + glUniform1i(toneCurveTextureUniform, 3); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +#pragma mark - +#pragma mark Accessors + +- (void)setRGBControlPoints:(NSArray *)points +{ + _redControlPoints = [points copy]; + _redCurve = [self getPreparedSplineCurve:_redControlPoints]; + + _greenControlPoints = [points copy]; + _greenCurve = [self getPreparedSplineCurve:_greenControlPoints]; + + _blueControlPoints = [points copy]; + _blueCurve = [self getPreparedSplineCurve:_blueControlPoints]; + + [self updateToneCurveTexture]; +} + + +- (void)setRgbCompositeControlPoints:(NSArray *)newValue +{ + _rgbCompositeControlPoints = [newValue copy]; + _rgbCompositeCurve = [self getPreparedSplineCurve:_rgbCompositeControlPoints]; + + [self updateToneCurveTexture]; +} + + +- (void)setRedControlPoints:(NSArray *)newValue; +{ + _redControlPoints = [newValue copy]; + _redCurve = [self getPreparedSplineCurve:_redControlPoints]; + + [self updateToneCurveTexture]; +} + + +- (void)setGreenControlPoints:(NSArray *)newValue +{ + _greenControlPoints = [newValue copy]; + _greenCurve = [self getPreparedSplineCurve:_greenControlPoints]; + + [self updateToneCurveTexture]; +} + + +- (void)setBlueControlPoints:(NSArray *)newValue +{ + _blueControlPoints = [newValue copy]; + 
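+    // As in the other channel setters, recompute this channel's 256-entry
+    // spline and re-upload the combined BGRA lookup texture.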
_blueCurve = [self getPreparedSplineCurve:_blueControlPoints]; + + [self updateToneCurveTexture]; +} + +@end diff --git a/GPUImage/Source/GPUImageToonFilter.h b/GPUImage/Source/GPUImageToonFilter.h new file mode 100755 index 0000000..ef8e17c --- /dev/null +++ b/GPUImage/Source/GPUImageToonFilter.h @@ -0,0 +1,19 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +/** This uses Sobel edge detection to place a black border around objects, + and then it quantizes the colors present in the image to give a cartoon-like quality to the image. + */ +@interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter +{ + GLint thresholdUniform, quantizationLevelsUniform; +} + +/** The threshold at which to apply the edges, default of 0.2 + */ +@property(readwrite, nonatomic) CGFloat threshold; + +/** The levels of quantization for the posterization of colors within the scene, with a default of 10.0 + */ +@property(readwrite, nonatomic) CGFloat quantizationLevels; + +@end diff --git a/GPUImage/Source/GPUImageToonFilter.m b/GPUImage/Source/GPUImageToonFilter.m new file mode 100755 index 0000000..e8ff104 --- /dev/null +++ b/GPUImage/Source/GPUImageToonFilter.m @@ -0,0 +1,149 @@ +#import "GPUImageToonFilter.h" +#import "GPUImageSobelEdgeDetectionFilter.h" +#import "GPUImage3x3ConvolutionFilter.h" + +// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp float intensity; + uniform highp float threshold; + uniform highp float quantizationLevels; + + const highp vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)); + + vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels; + + float thresholdTest = 1.0 - step(threshold, mag); + + gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a); + } +); +#else +NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + 
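+ // The center coordinate above and the eight neighbor coordinates below are
+ // precomputed per-vertex by the 3x3 texture-sampling superclass, one per tap
+ // of the Sobel window.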
varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float intensity; + uniform float threshold; + uniform float quantizationLevels; + + const vec3 W = vec3(0.2125, 0.7154, 0.0721); + + void main() + { + vec4 textureColor = texture2D(inputImageTexture, textureCoordinate); + + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + + float mag = length(vec2(h, v)); + + vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels; + + float thresholdTest = 1.0 - step(threshold, mag); + + gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a); + } +); +#endif + +@implementation GPUImageToonFilter + +@synthesize threshold = _threshold; +@synthesize quantizationLevels = _quantizationLevels; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageToonFragmentShaderString])) + { + return nil; + } + + hasOverriddenImageSizeFactor = NO; + + thresholdUniform = [filterProgram uniformIndex:@"threshold"]; + quantizationLevelsUniform = [filterProgram uniformIndex:@"quantizationLevels"]; + + self.threshold = 0.2; + self.quantizationLevels = 10.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setThreshold:(CGFloat)newValue; +{ + _threshold = newValue; + + [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram]; +} + +- (void)setQuantizationLevels:(CGFloat)newValue; +{ + _quantizationLevels = newValue; + + [self setFloat:_quantizationLevels forUniform:quantizationLevelsUniform program:filterProgram]; +} + + +@end + diff --git a/GPUImage/Source/GPUImageTransformFilter.h b/GPUImage/Source/GPUImageTransformFilter.h new file mode 100755 index 0000000..9865b85 --- /dev/null +++ b/GPUImage/Source/GPUImageTransformFilter.h @@ -0,0 +1,19 @@ +#import "GPUImageFilter.h" + +@interface GPUImageTransformFilter : GPUImageFilter +{ + GLint transformMatrixUniform, orthographicMatrixUniform; + GPUMatrix4x4 orthographicMatrix; +} + +// You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input). 
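+// For instance, rotating the image by 45 degrees (a usage sketch):
+//
+//    GPUImageTransformFilter *transformFilter = [[GPUImageTransformFilter alloc] init];
+//    transformFilter.affineTransform = CGAffineTransformMakeRotation(M_PI / 4.0);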
+@property(readwrite, nonatomic) CGAffineTransform affineTransform; +@property(readwrite, nonatomic) CATransform3D transform3D; + +// This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating +@property(readwrite, nonatomic) BOOL ignoreAspectRatio; + +// sets the anchor point to top left corner +@property(readwrite, nonatomic) BOOL anchorTopLeft; + +@end diff --git a/GPUImage/Source/GPUImageTransformFilter.m b/GPUImage/Source/GPUImageTransformFilter.m new file mode 100755 index 0000000..6cbc5eb --- /dev/null +++ b/GPUImage/Source/GPUImageTransformFilter.m @@ -0,0 +1,262 @@ +#import "GPUImageTransformFilter.h" + +NSString *const kGPUImageTransformVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + + uniform mat4 transformMatrix; + uniform mat4 orthographicMatrix; + + varying vec2 textureCoordinate; + + void main() + { + gl_Position = transformMatrix * vec4(position.xyz, 1.0) * orthographicMatrix; + textureCoordinate = inputTextureCoordinate.xy; + } +); + +@implementation GPUImageTransformFilter + +@synthesize affineTransform; +@synthesize transform3D = _transform3D; +@synthesize ignoreAspectRatio = _ignoreAspectRatio; +@synthesize anchorTopLeft = _anchorTopLeft; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageTransformVertexShaderString fragmentShaderFromString:kGPUImagePassthroughFragmentShaderString])) + { + return nil; + } + + transformMatrixUniform = [filterProgram uniformIndex:@"transformMatrix"]; + orthographicMatrixUniform = [filterProgram uniformIndex:@"orthographicMatrix"]; + + self.transform3D = CATransform3DIdentity; + + return self; +} + +#pragma mark - +#pragma mark Conversion from matrix formats + +- (void)loadOrthoMatrix:(GLfloat *)matrix left:(GLfloat)left right:(GLfloat)right bottom:(GLfloat)bottom top:(GLfloat)top near:(GLfloat)near far:(GLfloat)far; +{ + GLfloat r_l = right - left; + GLfloat t_b = top - bottom; + GLfloat f_n = far - near; + GLfloat tx = - (right + left) / (right - left); + GLfloat ty = - (top + bottom) / (top - bottom); + GLfloat tz = - (far + near) / (far - near); + + float scale = 2.0f; + if (_anchorTopLeft) + { + scale = 4.0f; + tx=-1.0f; + ty=-1.0f; + } + + matrix[0] = scale / r_l; + matrix[1] = 0.0f; + matrix[2] = 0.0f; + matrix[3] = tx; + + matrix[4] = 0.0f; + matrix[5] = scale / t_b; + matrix[6] = 0.0f; + matrix[7] = ty; + + matrix[8] = 0.0f; + matrix[9] = 0.0f; + matrix[10] = scale / f_n; + matrix[11] = tz; + + matrix[12] = 0.0f; + matrix[13] = 0.0f; + matrix[14] = 0.0f; + matrix[15] = 1.0f; +} + +//- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GLfloat *)matrix; +//{ +// // struct CATransform3D +// // { +// // CGFloat m11, m12, m13, m14; +// // CGFloat m21, m22, m23, m24; +// // CGFloat m31, m32, m33, m34; +// // CGFloat m41, m42, m43, m44; +// // }; +// +// matrix[0] = (GLfloat)transform3D->m11; +// matrix[1] = (GLfloat)transform3D->m12; +// matrix[2] = (GLfloat)transform3D->m13; +// matrix[3] = (GLfloat)transform3D->m14; +// matrix[4] = (GLfloat)transform3D->m21; +// matrix[5] = (GLfloat)transform3D->m22; +// matrix[6] = (GLfloat)transform3D->m23; +// matrix[7] = (GLfloat)transform3D->m24; +// matrix[8] = (GLfloat)transform3D->m31; +// matrix[9] = (GLfloat)transform3D->m32; +// matrix[10] = (GLfloat)transform3D->m33; +// matrix[11] = (GLfloat)transform3D->m34; +// matrix[12] = 
(GLfloat)transform3D->m41; +// matrix[13] = (GLfloat)transform3D->m42; +// matrix[14] = (GLfloat)transform3D->m43; +// matrix[15] = (GLfloat)transform3D->m44; +//} + +- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GPUMatrix4x4 *)matrix; +{ + // struct CATransform3D + // { + // CGFloat m11, m12, m13, m14; + // CGFloat m21, m22, m23, m24; + // CGFloat m31, m32, m33, m34; + // CGFloat m41, m42, m43, m44; + // }; + + GLfloat *mappedMatrix = (GLfloat *)matrix; + + mappedMatrix[0] = (GLfloat)transform3D->m11; + mappedMatrix[1] = (GLfloat)transform3D->m12; + mappedMatrix[2] = (GLfloat)transform3D->m13; + mappedMatrix[3] = (GLfloat)transform3D->m14; + mappedMatrix[4] = (GLfloat)transform3D->m21; + mappedMatrix[5] = (GLfloat)transform3D->m22; + mappedMatrix[6] = (GLfloat)transform3D->m23; + mappedMatrix[7] = (GLfloat)transform3D->m24; + mappedMatrix[8] = (GLfloat)transform3D->m31; + mappedMatrix[9] = (GLfloat)transform3D->m32; + mappedMatrix[10] = (GLfloat)transform3D->m33; + mappedMatrix[11] = (GLfloat)transform3D->m34; + mappedMatrix[12] = (GLfloat)transform3D->m41; + mappedMatrix[13] = (GLfloat)transform3D->m42; + mappedMatrix[14] = (GLfloat)transform3D->m43; + mappedMatrix[15] = (GLfloat)transform3D->m44; +} + +#pragma mark - +#pragma mark GPUImageInput + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + CGSize currentFBOSize = [self sizeOfFBO]; + CGFloat normalizedHeight = currentFBOSize.height / currentFBOSize.width; + + GLfloat adjustedVertices[] = { + -1.0f, -normalizedHeight, + 1.0f, -normalizedHeight, + -1.0f, normalizedHeight, + 1.0f, normalizedHeight, + }; + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + GLfloat adjustedVerticesAnchorTL[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, normalizedHeight, + 1.0f, normalizedHeight, + }; + + static const GLfloat squareVerticesAnchorTL[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + if (_ignoreAspectRatio) + { + if (_anchorTopLeft) + { + [self renderToTextureWithVertices:squareVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + } + else + { + [self renderToTextureWithVertices:squareVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + } + } + else + { + if (_anchorTopLeft) + { + [self renderToTextureWithVertices:adjustedVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + } + else + { + [self renderToTextureWithVertices:adjustedVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation] sourceTexture:filterSourceTexture]; + } + } + + [self informTargetsAboutNewFrameAtTime:frameTime]; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + if (!_ignoreAspectRatio) + { + [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * filterFrameSize.height / filterFrameSize.width) top:(1.0 * filterFrameSize.height / filterFrameSize.width) near:-1.0 far:1.0]; + // [self loadOrthoMatrix:orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) top:(1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) near:-2.0 far:2.0]; + + [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram]; + } +} + +#pragma 
mark - +#pragma mark Accessors + +- (void)setAffineTransform:(CGAffineTransform)newValue; +{ + self.transform3D = CATransform3DMakeAffineTransform(newValue); +} + +- (CGAffineTransform)affineTransform; +{ + return CATransform3DGetAffineTransform(self.transform3D); +} + +- (void)setTransform3D:(CATransform3D)newValue; +{ + _transform3D = newValue; + + GPUMatrix4x4 temporaryMatrix; + + [self convert3DTransform:&_transform3D toMatrix:&temporaryMatrix]; + [self setMatrix4f:temporaryMatrix forUniform:transformMatrixUniform program:filterProgram]; +} + +- (void)setIgnoreAspectRatio:(BOOL)newValue; +{ + _ignoreAspectRatio = newValue; + + if (_ignoreAspectRatio) + { + [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:-1.0 top:1.0 near:-1.0 far:1.0]; + [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram]; + } + else + { + [self setupFilterForSize:[self sizeOfFBO]]; + } +} + +- (void)setAnchorTopLeft:(BOOL)newValue +{ + _anchorTopLeft = newValue; + [self setIgnoreAspectRatio:_ignoreAspectRatio]; +} + +@end diff --git a/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.h b/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.h new file mode 100644 index 0000000..64eac9d --- /dev/null +++ b/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.h @@ -0,0 +1,15 @@ +#import "GPUImageTwoInputFilter.h" + +@interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter +{ + GLint texelWidthUniform, texelHeightUniform; + + CGFloat texelWidth, texelHeight; + BOOL hasOverriddenImageSizeFactor; +} + +// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects. 
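+// For instance, to sample two pixels out instead of one (a sketch with a
+// hypothetical filter instance; setupFilterForSize normally derives these as
+// 1.0 / width and 1.0 / height of the input frame):
+//
+//    crossSamplingFilter.texelWidth = 2.0 / inputImageSize.width;
+//    crossSamplingFilter.texelHeight = 2.0 / inputImageSize.height;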
+@property(readwrite, nonatomic) CGFloat texelWidth; +@property(readwrite, nonatomic) CGFloat texelHeight; + +@end diff --git a/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.m b/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.m new file mode 100644 index 0000000..aa338f8 --- /dev/null +++ b/GPUImage/Source/GPUImageTwoInputCrossTextureSamplingFilter.m @@ -0,0 +1,108 @@ +#import "GPUImageTwoInputCrossTextureSamplingFilter.h" + +NSString *const kGPUImageTwoInputNearbyTexelSamplingVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + attribute vec4 inputTextureCoordinate2; + + uniform float texelWidth; + uniform float texelHeight; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + varying vec2 topTextureCoordinate; + varying vec2 bottomTextureCoordinate; + + varying vec2 textureCoordinate2; + varying vec2 leftTextureCoordinate2; + varying vec2 rightTextureCoordinate2; + varying vec2 topTextureCoordinate2; + varying vec2 bottomTextureCoordinate2; + + void main() + { + gl_Position = position; + + vec2 widthStep = vec2(texelWidth, 0.0); + vec2 heightStep = vec2(0.0, texelHeight); + + textureCoordinate = inputTextureCoordinate.xy; + leftTextureCoordinate = inputTextureCoordinate.xy - widthStep; + rightTextureCoordinate = inputTextureCoordinate.xy + widthStep; + topTextureCoordinate = inputTextureCoordinate.xy - heightStep; + bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep; + + textureCoordinate2 = inputTextureCoordinate2.xy; + leftTextureCoordinate2 = inputTextureCoordinate2.xy - widthStep; + rightTextureCoordinate2 = inputTextureCoordinate2.xy + widthStep; + topTextureCoordinate2 = inputTextureCoordinate2.xy - heightStep; + bottomTextureCoordinate2 = inputTextureCoordinate2.xy + heightStep; + } +); + +@implementation GPUImageTwoInputCrossTextureSamplingFilter + +@synthesize texelWidth = _texelWidth; +@synthesize texelHeight = _texelHeight; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithVertexShaderFromString:kGPUImageTwoInputNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"]; + texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"]; + + return self; +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + if (!hasOverriddenImageSizeFactor) + { + _texelWidth = 1.0 / filterFrameSize.width; + _texelHeight = 1.0 / filterFrameSize.height; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:filterProgram]; + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + glUniform1f(texelWidthUniform, _texelHeight); + glUniform1f(texelHeightUniform, _texelWidth); + } + else + { + glUniform1f(texelWidthUniform, _texelWidth); + glUniform1f(texelHeightUniform, _texelHeight); + } + }); + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTexelWidth:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelWidth = newValue; + + [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram]; +} + +- (void)setTexelHeight:(CGFloat)newValue; +{ + hasOverriddenImageSizeFactor = YES; + _texelHeight = newValue; + + [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram]; +} + +@end diff --git 
a/GPUImage/Source/GPUImageTwoInputFilter.h b/GPUImage/Source/GPUImageTwoInputFilter.h new file mode 100644 index 0000000..c6bb82e --- /dev/null +++ b/GPUImage/Source/GPUImageTwoInputFilter.h @@ -0,0 +1,22 @@ +#import "GPUImageFilter.h" + +extern NSString *const kGPUImageTwoInputTextureVertexShaderString; + +@interface GPUImageTwoInputFilter : GPUImageFilter +{ + GLint filterSecondTextureCoordinateAttribute; + GLint filterInputTextureUniform2; + GPUImageRotationMode inputRotation2; + GLuint filterSourceTexture2; + CMTime firstFrameTime, secondFrameTime; + + BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo; + BOOL firstFrameCheckDisabled, secondFrameCheckDisabled; + + __unsafe_unretained id secondTextureDelegate; +} + +- (void)disableFirstFrameCheck; +- (void)disableSecondFrameCheck; + +@end diff --git a/GPUImage/Source/GPUImageTwoInputFilter.m b/GPUImage/Source/GPUImageTwoInputFilter.m new file mode 100644 index 0000000..6570277 --- /dev/null +++ b/GPUImage/Source/GPUImageTwoInputFilter.m @@ -0,0 +1,271 @@ +#import "GPUImageTwoInputFilter.h" + +NSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING +( + attribute vec4 position; + attribute vec4 inputTextureCoordinate; + attribute vec4 inputTextureCoordinate2; + + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + void main() + { + gl_Position = position; + textureCoordinate = inputTextureCoordinate.xy; + textureCoordinate2 = inputTextureCoordinate2.xy; + } +); + + +@implementation GPUImageTwoInputFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [self initWithVertexShaderFromString:kGPUImageTwoInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + return self; +} + +- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString; +{ + if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString])) + { + return nil; + } + + inputRotation2 = kGPUImageNoRotation; + + hasSetFirstTexture = NO; + + hasReceivedFirstFrame = NO; + hasReceivedSecondFrame = NO; + firstFrameWasVideo = NO; + secondFrameWasVideo = NO; + firstFrameCheckDisabled = NO; + secondFrameCheckDisabled = NO; + + firstFrameTime = kCMTimeInvalid; + secondFrameTime = kCMTimeInvalid; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + filterSecondTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate2"]; + + filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader + glEnableVertexAttribArray(filterSecondTextureCoordinateAttribute); + }); + + return self; +} + +- (void)initializeAttributes; +{ + [super initializeAttributes]; + [filterProgram addAttribute:@"inputTextureCoordinate2"]; +} + +- (void)disableFirstFrameCheck; +{ + firstFrameCheckDisabled = YES; +} + +- (void)disableSecondFrameCheck; +{ + secondFrameCheckDisabled = YES; +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + [GPUImageContext 
setActiveShaderProgram:filterProgram]; + [self setFilterFBO]; + [self setUniformsForProgramAtIndex:0]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + glUniform1i(filterInputTextureUniform, 2); + + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, filterSourceTexture2); + glUniform1i(filterInputTextureUniform2, 3); + + glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)releaseInputTexturesIfNeeded; +{ + if (shouldConserveMemoryForNextFrame) + { + [firstTextureDelegate textureNoLongerNeededForTarget:self]; + [secondTextureDelegate textureNoLongerNeededForTarget:self]; + shouldConserveMemoryForNextFrame = NO; + } +} + +#pragma mark - +#pragma mark GPUImageInput + +- (NSInteger)nextAvailableTextureIndex; +{ + if (hasSetFirstTexture) + { + return 1; + } + else + { + return 0; + } +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + filterSourceTexture = newInputTexture; + hasSetFirstTexture = YES; + } + else + { + filterSourceTexture2 = newInputTexture; + } +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + [super setInputSize:newSize atIndex:textureIndex]; + + if (CGSizeEqualToSize(newSize, CGSizeZero)) + { + hasSetFirstTexture = NO; + } + } +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + inputRotation = newInputRotation; + } + else + { + inputRotation2 = newInputRotation; + } +} + +- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex; +{ + CGSize rotatedSize = sizeToRotate; + + GPUImageRotationMode rotationToCheck; + if (textureIndex == 0) + { + rotationToCheck = inputRotation; + } + else + { + rotationToCheck = inputRotation2; + } + + if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck)) + { + rotatedSize.width = sizeToRotate.height; + rotatedSize.height = sizeToRotate.width; + } + + return rotatedSize; +} + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + outputTextureRetainCount = [targets count]; + + // You can set up infinite update loops, so this helps to short circuit them + if (hasReceivedFirstFrame && hasReceivedSecondFrame) + { + return; + } + + BOOL updatedMovieFrameOppositeStillImage = NO; + + if (textureIndex == 0) + { + hasReceivedFirstFrame = YES; + firstFrameTime = frameTime; + if (secondFrameCheckDisabled) + { + hasReceivedSecondFrame = YES; + } + + if (!CMTIME_IS_INDEFINITE(frameTime)) + { + if CMTIME_IS_INDEFINITE(secondFrameTime) + { + updatedMovieFrameOppositeStillImage = YES; + } + } + } + else + { + hasReceivedSecondFrame = YES; + secondFrameTime = frameTime; + if (firstFrameCheckDisabled) + { + hasReceivedFirstFrame = YES; + } + + if (!CMTIME_IS_INDEFINITE(frameTime)) + { + if CMTIME_IS_INDEFINITE(firstFrameTime) + { + updatedMovieFrameOppositeStillImage = YES; + } + } + } + + // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled) + if 
((hasReceivedFirstFrame && hasReceivedSecondFrame) || updatedMovieFrameOppositeStillImage) + { + CMTime passOnFrameTime = (!CMTIME_IS_INDEFINITE(firstFrameTime)) ? firstFrameTime : secondFrameTime; + [super newFrameReadyAtTime:passOnFrameTime atIndex:0]; // Bugfix when trying to record: always use time from first input (unless indefinite, in which case use the second input) + hasReceivedFirstFrame = NO; + hasReceivedSecondFrame = NO; + } +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + if (textureIndex == 0) + { + firstTextureDelegate = newTextureDelegate; + } + else + { + secondTextureDelegate = newTextureDelegate; + } +} + +@end diff --git a/GPUImage/Source/GPUImageTwoPassFilter.h b/GPUImage/Source/GPUImageTwoPassFilter.h new file mode 100755 index 0000000..b76ab6d --- /dev/null +++ b/GPUImage/Source/GPUImageTwoPassFilter.h @@ -0,0 +1,25 @@ +#import "GPUImageFilter.h" + +@interface GPUImageTwoPassFilter : GPUImageFilter +{ + GLuint secondFilterOutputTexture; + + GLProgram *secondFilterProgram; + GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute; + GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2; + + GLuint secondFilterFramebuffer; + + NSMutableDictionary *secondProgramUniformStateRestorationBlocks; +} + +// Initialization and teardown +- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString; +- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString; +- (void)initializeSecondaryAttributes; +- (void)initializeSecondOutputTextureIfNeeded; + +// Managing the display FBOs +- (void)createSecondFilterFBOofSize:(CGSize)currentFBOSize; + +@end diff --git a/GPUImage/Source/GPUImageTwoPassFilter.m b/GPUImage/Source/GPUImageTwoPassFilter.m new file mode 100755 index 0000000..899626b --- /dev/null +++ b/GPUImage/Source/GPUImageTwoPassFilter.m @@ -0,0 +1,466 @@ +#import "GPUImageTwoPassFilter.h" + +@implementation GPUImageTwoPassFilter + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString; +{ + if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString])) + { + return nil; + } + + secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10]; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString]; + + if (!secondFilterProgram.initialized) + { + [self initializeSecondaryAttributes]; + + if (![secondFilterProgram link]) + { + NSString *progLog = [secondFilterProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [secondFilterProgram fragmentShaderLog]; + 
NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [secondFilterProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + secondFilterProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"]; + secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"]; + secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader + secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader + + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + + glEnableVertexAttribArray(secondFilterPositionAttribute); + glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute); + }); + + return self; +} + +- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString; +{ + if (!(self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:kGPUImageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString])) + { + return nil; + } + + return self; +} + +- (void)initializeSecondaryAttributes; +{ + [secondFilterProgram addAttribute:@"position"]; + [secondFilterProgram addAttribute:@"inputTextureCoordinate"]; +} + +#pragma mark - +#pragma mark Managing targets + +- (GLuint)textureForOutput; +{ + return secondFilterOutputTexture; +} + +#pragma mark - +#pragma mark Manage the output texture + +- (void)initializeSecondOutputTextureIfNeeded; +{ + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + return; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (!secondFilterOutputTexture) + { + glGenTextures(1, &secondFilterOutputTexture); + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glBindTexture(GL_TEXTURE_2D, 0); + } + }); +} + +- (void)deleteOutputTexture; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + + if (!([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)) + { + if (secondFilterOutputTexture) + { + glDeleteTextures(1, &secondFilterOutputTexture); + secondFilterOutputTexture = 0; + } + } + }); +} + +#pragma mark - +#pragma mark Managing the display FBOs + +- (void)createFilterFBOofSize:(CGSize)currentFBOSize; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (!filterFramebuffer) + { + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + preparedToCaptureImage = NO; + [super createFilterFBOofSize:currentFBOSize]; + preparedToCaptureImage = YES; + } + else + { + [super createFilterFBOofSize:currentFBOSize]; + } + } + + [self 
createSecondFilterFBOofSize:[self sizeOfFBO]]; + }); +} + +- (void)createSecondFilterFBOofSize:(CGSize)currentFBOSize; +{ + glGenFramebuffers(1, &secondFilterFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &filterTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + CFDictionaryRef empty; // empty value for attr value. + CFMutableDictionaryRef attrs; + empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary + attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); + CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty); + + err = CVPixelBufferCreate(kCFAllocatorDefault, (int)currentFBOSize.width, (int)currentFBOSize.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget); + if (err) + { + NSLog(@"FBO size: %f, %f", currentFBOSize.width, currentFBOSize.height); + NSAssert(NO, @"Error at CVPixelBufferCreate %d", err); + } + + err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, + filterTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + self.outputTextureOptions.internalFormat, // opengl format + (int)currentFBOSize.width, + (int)currentFBOSize.height, + self.outputTextureOptions.format, // native iOS format + self.outputTextureOptions.type, + 0, + &renderTexture); + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + CFRelease(attrs); + CFRelease(empty); + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); + secondFilterOutputTexture = CVOpenGLESTextureGetName(renderTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.outputTextureOptions.wrapS); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.outputTextureOptions.wrapT); + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); + + [self notifyTargetsAboutNewOutputTexture]; +#endif + } + else + { + [self initializeSecondOutputTextureIfNeeded]; + glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture); + // if ([self providesMonochromeOutput] && [GPUImageContext deviceSupportsRedTextures]) + // { + // glTexImage2D(GL_TEXTURE_2D, 0, GL_RG_EXT, (int)currentFBOSize.width, (int)currentFBOSize.height, 0, GL_RG_EXT, GL_UNSIGNED_BYTE, 0); + // } + // else + // { + glTexImage2D(GL_TEXTURE_2D, + 0, + self.outputTextureOptions.internalFormat, + (int)currentFBOSize.width, + (int)currentFBOSize.height, + 0, + self.outputTextureOptions.format, + self.outputTextureOptions.type, + 0); + // } + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, secondFilterOutputTexture, 0); + + [self 
notifyTargetsAboutNewOutputTexture]; + } + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + + glBindTexture(GL_TEXTURE_2D, 0); + +} + +- (void)recreateFilterFBO +{ + cachedMaximumOutputSize = CGSizeZero; + + [self destroyFilterFBO]; + [self deleteOutputTexture]; +// +// [self setFilterFBO]; +// [self setSecondFilterFBO]; +} + +- (void)destroyFilterFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (filterFramebuffer) + { + glDeleteFramebuffers(1, &filterFramebuffer); + filterFramebuffer = 0; + } + + if (secondFilterFramebuffer) + { + glDeleteFramebuffers(1, &secondFilterFramebuffer); + secondFilterFramebuffer = 0; + } + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE + if (filterTextureCache != NULL) + { + CFRelease(renderTarget); + renderTarget = NULL; + + if (renderTexture) + { + CFRelease(renderTexture); + renderTexture = NULL; + } + + CVOpenGLESTextureCacheFlush(filterTextureCache, 0); + CFRelease(filterTextureCache); + filterTextureCache = NULL; + } +#endif + }); +} + +- (void)setFilterFBO; +{ + CGSize currentFBOSize = [self sizeOfFBO]; + + if (!filterFramebuffer) + { + if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage) + { + preparedToCaptureImage = NO; + [super createFilterFBOofSize:currentFBOSize]; + preparedToCaptureImage = YES; + } + else + { + [super createFilterFBOofSize:currentFBOSize]; + } + [self setupFilterForSize:currentFBOSize]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, filterFramebuffer); + + glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height); +} + +- (void)setSecondFilterFBO; +{ + CGSize currentFBOSize = [self sizeOfFBO]; + if (!secondFilterFramebuffer) + { + [self createFilterFBOofSize:currentFBOSize]; + [self setupFilterForSize:currentFBOSize]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, secondFilterFramebuffer); + glViewport(0, 0, (int)currentFBOSize.width, (int)currentFBOSize.height); +} + +- (void)setOutputFBO; +{ + [self setSecondFilterFBO]; +} + +#pragma mark - +#pragma mark Rendering + +- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture; +{ + if (self.preventRendering) + { + return; + } + + // This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped + if (!currentlyReceivingMonochromeInput) + { + // Run the first stage of the two-pass filter + [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates sourceTexture:sourceTexture]; + } + + + // Run the second stage of the two-pass filter + [GPUImageContext setActiveShaderProgram:secondFilterProgram]; + glActiveTexture(GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, 0); + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, 0); + [self setSecondFilterFBO]; + + [self setUniformsForProgramAtIndex:1]; + + if (!currentlyReceivingMonochromeInput) + { + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]); + } + else + { + glActiveTexture(GL_TEXTURE3); + glBindTexture(GL_TEXTURE_2D, sourceTexture); + glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + } + + 
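+    // Either way, texture unit 3 now holds the second-stage input (the first
+    // pass's output, or the raw source when the luminance pass was skipped),
+    // so point the second program's sampler at it.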
glUniform1i(secondFilterInputTextureUniform, 3); + + glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + // Release the first FBO early + if (shouldConserveMemoryForNextFrame) + { + [firstTextureDelegate textureNoLongerNeededForTarget:self]; + + glDeleteFramebuffers(1, &filterFramebuffer); + filterFramebuffer = 0; + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + + shouldConserveMemoryForNextFrame = NO; + } +} + +// Clear this out because I want to release the input texture as soon as the first pass is finished, not just after the whole rendering has completed +- (void)releaseInputTexturesIfNeeded; +{ +} + +- (void)prepareForImageCapture; +{ + if (preparedToCaptureImage) + { + return; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + preparedToCaptureImage = YES; + + if ([GPUImageContext supportsFastTextureUpload]) + { + if (secondFilterOutputTexture) + { + [GPUImageContext useImageProcessingContext]; + + glDeleteTextures(1, &secondFilterOutputTexture); + secondFilterOutputTexture = 0; + } + } + }); +} + +- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock; +{ +// TODO: Deal with the fact that two-pass filters may have the same shader program identifier + if (shaderProgram == filterProgram) + { + [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]]; + } + else + { + [secondProgramUniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]]; + } + uniformStateBlock(); +} + +- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex; +{ + if (programIndex == 0) + { + [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){ + dispatch_block_t currentBlock = obj; + currentBlock(); + }]; + } + else + { + [secondProgramUniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){ + dispatch_block_t currentBlock = obj; + currentBlock(); + }]; + } +} + +@end diff --git a/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.h b/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.h new file mode 100644 index 0000000..73ab79d --- /dev/null +++ b/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageTwoPassFilter.h" + +@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter +{ + GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform; + GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset; + CGFloat _verticalTexelSpacing, _horizontalTexelSpacing; +} + +// This sets the spacing between texels (in pixels) when sampling for the first. 
By default, this is 1.0 +@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing; + +@end diff --git a/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.m b/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.m new file mode 100644 index 0000000..b6a2ec5 --- /dev/null +++ b/GPUImage/Source/GPUImageTwoPassTextureSamplingFilter.m @@ -0,0 +1,85 @@ +#import "GPUImageTwoPassTextureSamplingFilter.h" + +@implementation GPUImageTwoPassTextureSamplingFilter + +@synthesize verticalTexelSpacing = _verticalTexelSpacing; +@synthesize horizontalTexelSpacing = _horizontalTexelSpacing; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString +{ + if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString])) + { + return nil; + } + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"]; + verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"]; + + horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"]; + horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"]; + }); + + self.verticalTexelSpacing = 1.0; + self.horizontalTexelSpacing = 1.0; + + return self; +} + +- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex; +{ + [super setUniformsForProgramAtIndex:programIndex]; + + if (programIndex == 0) + { + glUniform1f(verticalPassTexelWidthOffsetUniform, verticalPassTexelWidthOffset); + glUniform1f(verticalPassTexelHeightOffsetUniform, verticalPassTexelHeightOffset); + } + else + { + glUniform1f(horizontalPassTexelWidthOffsetUniform, horizontalPassTexelWidthOffset); + glUniform1f(horizontalPassTexelHeightOffsetUniform, horizontalPassTexelHeightOffset); + } +} + +- (void)setupFilterForSize:(CGSize)filterFrameSize; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + verticalPassTexelWidthOffset = _verticalTexelSpacing / filterFrameSize.height; + verticalPassTexelHeightOffset = 0.0; + } + else + { + verticalPassTexelWidthOffset = 0.0; + verticalPassTexelHeightOffset = _verticalTexelSpacing / filterFrameSize.height; + } + + horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width; + horizontalPassTexelHeightOffset = 0.0; + }); +} + +#pragma mark - +#pragma mark Accessors + +- (void)setVerticalTexelSpacing:(CGFloat)newValue; +{ + _verticalTexelSpacing = newValue; + [self setupFilterForSize:[self sizeOfFBO]]; +} + +- (void)setHorizontalTexelSpacing:(CGFloat)newValue; +{ + _horizontalTexelSpacing = newValue; + [self setupFilterForSize:[self sizeOfFBO]]; +} + +@end diff --git a/GPUImage/Source/GPUImageUIElement.h b/GPUImage/Source/GPUImageUIElement.h new file mode 
100644 index 0000000..09fe932 --- /dev/null +++ b/GPUImage/Source/GPUImageUIElement.h @@ -0,0 +1,13 @@ +#import "GPUImageOutput.h" + +@interface GPUImageUIElement : GPUImageOutput + +// Initialization and teardown +- (id)initWithView:(UIView *)inputView; +- (id)initWithLayer:(CALayer *)inputLayer; + +// Layer management +- (CGSize)layerSizeInPixels; +- (void)update; + +@end diff --git a/GPUImage/Source/GPUImageUIElement.m b/GPUImage/Source/GPUImageUIElement.m new file mode 100644 index 0000000..3c154a7 --- /dev/null +++ b/GPUImage/Source/GPUImageUIElement.m @@ -0,0 +1,111 @@ +#import "GPUImageUIElement.h" + +@interface GPUImageUIElement () +{ + UIView *view; + CALayer *layer; + + CGSize previousLayerSizeInPixels; + CMTime time; + NSTimeInterval actualTimeOfLastUpdate; +} + +@end + +@implementation GPUImageUIElement + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithView:(UIView *)inputView; +{ + if (!(self = [super init])) + { + return nil; + } + + view = inputView; + layer = inputView.layer; + + previousLayerSizeInPixels = CGSizeZero; + [self update]; + + return self; +} + +- (id)initWithLayer:(CALayer *)inputLayer; +{ + if (!(self = [super init])) + { + return nil; + } + + view = nil; + layer = inputLayer; + + previousLayerSizeInPixels = CGSizeZero; + [self update]; + + return self; +} + +#pragma mark - +#pragma mark Layer management + +- (CGSize)layerSizeInPixels; +{ + CGSize pointSize = layer.bounds.size; + return CGSizeMake(layer.contentsScale * pointSize.width, layer.contentsScale * pointSize.height); +} + +- (void)update; +{ + [GPUImageContext useImageProcessingContext]; + [self initializeOutputTextureIfNeeded]; + + if(CMTIME_IS_INVALID(time)) { + time = CMTimeMakeWithSeconds(0, 600); + actualTimeOfLastUpdate = [NSDate timeIntervalSinceReferenceDate]; + } else { + NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate]; + NSTimeInterval diff = now - actualTimeOfLastUpdate; + time = CMTimeAdd(time, CMTimeMakeWithSeconds(diff, 600)); + actualTimeOfLastUpdate = now; + } + + CGSize layerPixelSize = [self layerSizeInPixels]; + + GLubyte *imageData = (GLubyte *) calloc(1, (int)layerPixelSize.width * (int)layerPixelSize.height * 4); + + CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB(); + CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)layerPixelSize.width, (int)layerPixelSize.height, 8, (int)layerPixelSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); +// CGContextRotateCTM(imageContext, M_PI_2); + CGContextTranslateCTM(imageContext, 0.0f, layerPixelSize.height); + CGContextScaleCTM(imageContext, layer.contentsScale, -layer.contentsScale); + // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html + + [layer renderInContext:imageContext]; + + CGContextRelease(imageContext); + CGColorSpaceRelease(genericRGBColorspace); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + // no need to use self.outputTextureOptions here, we always need these texture options + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)layerPixelSize.width, (int)layerPixelSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData); + + free(imageData); + + for (id currentTarget in targets) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + 
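+            // Push the freshly rendered layer contents to this target: update its input size, then signal a new frame at the advanced timestamp so downstream filters re-render.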
[currentTarget setInputSize:layerPixelSize atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:time atIndex:textureIndexOfTarget]; + } + } +} + +@end diff --git a/GPUImage/Source/GPUImageUnsharpMaskFilter.h b/GPUImage/Source/GPUImageUnsharpMaskFilter.h new file mode 100755 index 0000000..9d8aff0 --- /dev/null +++ b/GPUImage/Source/GPUImageUnsharpMaskFilter.h @@ -0,0 +1,16 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageGaussianBlurFilter; + +@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup +{ + GPUImageGaussianBlurFilter *blurFilter; + GPUImageFilter *unsharpMaskFilter; +} +// The blur radius of the underlying Gaussian blur. The default is 4.0. +@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; + +// The strength of the sharpening, from 0.0 on up, with a default of 1.0 +@property(readwrite, nonatomic) CGFloat intensity; + +@end diff --git a/GPUImage/Source/GPUImageUnsharpMaskFilter.m b/GPUImage/Source/GPUImageUnsharpMaskFilter.m new file mode 100755 index 0000000..542c5ea --- /dev/null +++ b/GPUImage/Source/GPUImageUnsharpMaskFilter.m @@ -0,0 +1,101 @@ +#import "GPUImageUnsharpMaskFilter.h" +#import "GPUImageFilter.h" +#import "GPUImageTwoInputFilter.h" +#import "GPUImageGaussianBlurFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + varying highp vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform highp float intensity; + + void main() + { + lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + lowp vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb; + + gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a); +// gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity); +// gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0); + } +); +#else +NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 textureCoordinate2; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + + uniform float intensity; + + void main() + { + vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate); + vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb; + + gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a); + // gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity); + // gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0); + } +); +#endif + +@implementation GPUImageUnsharpMaskFilter + +@synthesize blurRadiusInPixels; +@synthesize intensity = _intensity; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: apply a variable Gaussian blur + blurFilter = [[GPUImageGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + + // Second pass: combine the blurred image with the original sharp one + unsharpMaskFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageUnsharpMaskFragmentShaderString]; + [self addFilter:unsharpMaskFilter]; + + // Texture location 0 needs to be the sharp image for both the blur and the second stage processing + [blurFilter addTarget:unsharpMaskFilter atTextureLocation:1]; + + self.initialFilters = 
[NSArray arrayWithObjects:blurFilter, unsharpMaskFilter, nil]; + self.terminalFilter = unsharpMaskFilter; + + self.intensity = 1.0; + self.blurRadiusInPixels = 4.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setIntensity:(CGFloat)newValue; +{ + _intensity = newValue; + [unsharpMaskFilter setFloat:newValue forUniformName:@"intensity"]; +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/GPUImageVideoCamera.h b/GPUImage/Source/GPUImageVideoCamera.h new file mode 100755 index 0000000..3c06edc --- /dev/null +++ b/GPUImage/Source/GPUImageVideoCamera.h @@ -0,0 +1,154 @@ +#import <Foundation/Foundation.h> +#import <AVFoundation/AVFoundation.h> +#import <CoreMedia/CoreMedia.h> +#import "GPUImageContext.h" +#import "GPUImageOutput.h" + +extern const GLfloat kColorConversion601[]; +extern const GLfloat kColorConversion709[]; +extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString; +extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString; + + +// Delegate protocol for face detection. +@protocol GPUImageVideoCameraDelegate <NSObject> + +@optional +- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer; +@end + + +/** + A GPUImageOutput that provides frames from either camera +*/ +@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> +{ + CVOpenGLESTextureCacheRef coreVideoTextureCache; + + NSUInteger numberOfFramesCaptured; + CGFloat totalFrameTimeDuringCapture; + + AVCaptureSession *_captureSession; + AVCaptureDevice *_inputCamera; + AVCaptureDevice *_microphone; + AVCaptureDeviceInput *videoInput; + AVCaptureVideoDataOutput *videoOutput; + + BOOL capturePaused; + GPUImageRotationMode outputRotation; + dispatch_semaphore_t frameRenderingSemaphore; + + BOOL captureAsYUV; + GLuint luminanceTexture, chrominanceTexture; + + __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate; +} + +/// The AVCaptureSession used to capture from the camera +@property(readonly, retain, nonatomic) AVCaptureSession *captureSession; + +/// This enables the capture session preset to be changed on the fly +@property (readwrite, nonatomic, copy) NSString *captureSessionPreset; + +/// This sets the frame rate of the camera (iOS 5 and above only) +/** + Setting this to 0 or below will set the frame rate back to the default setting for a particular preset. + */ +@property (readwrite) int32_t frameRate; + +/// Easy way to tell which cameras are present on device +@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent; +@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent; + +/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console +@property(readwrite, nonatomic) BOOL runBenchmark; + +/// Use this property to manage camera settings. Focus point, exposure point, etc. +@property(readonly) AVCaptureDevice *inputCamera; + +/// This determines the rotation applied to the output image, based on the source material +@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation; + +/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
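+/// (Mirroring is applied by selecting a flipped rotation mode for downstream targets, so toggling these adds no extra rendering pass.)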
+@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera; + +@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate; + +/// @name Initialization and teardown + +/** Begin a capture session + + See AVCaptureSession for acceptable values + + @param sessionPreset Session preset to use + @param cameraPosition Camera to capture from + */ +- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition; + +/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you + can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget + later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added. + */ +- (BOOL)addAudioInputsAndOutputs; + +/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs + were removed, or NO if they hadn't been added in the first place. + */ +- (BOOL)removeAudioInputsAndOutputs; + +/** Tear down the capture session + */ +- (void)removeInputsAndOutputs; + +/// @name Manage the camera video stream + +/** Start camera capturing + */ +- (void)startCameraCapture; + +/** Stop camera capturing + */ +- (void)stopCameraCapture; + +/** Pause camera capturing + */ +- (void)pauseCameraCapture; + +/** Resume camera capturing + */ +- (void)resumeCameraCapture; + +/** Process a video sample + @param sampleBuffer Buffer to process + */ +- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +/** Process an audio sample + @param sampleBuffer Buffer to process + */ +- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +/** Get the position (front, rear) of the source camera + */ +- (AVCaptureDevicePosition)cameraPosition; + +/** Get the AVCaptureConnection of the source camera + */ +- (AVCaptureConnection *)videoCaptureConnection; + +/** This flips between the front and rear cameras + */ +- (void)rotateCamera; + +/// @name Benchmarking + +/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display + */ +- (CGFloat)averageFrameDurationDuringCapture; + +- (void)resetBenchmarkAverage; + ++ (BOOL)isBackFacingCameraPresent; ++ (BOOL)isFrontFacingCameraPresent; + +@end diff --git a/GPUImage/Source/GPUImageVideoCamera.m b/GPUImage/Source/GPUImageVideoCamera.m new file mode 100644 index 0000000..bda9d9f --- /dev/null +++ b/GPUImage/Source/GPUImageVideoCamera.m @@ -0,0 +1,1069 @@ +#import "GPUImageVideoCamera.h" +#import "GPUImageMovieWriter.h" +#import "GPUImageFilter.h" + +// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range) + +// BT.601, which is the standard for SDTV. +const GLfloat kColorConversion601[] = { + 1.164, 1.164, 1.164, + 0.0, -0.392, 2.017, + 1.596, -0.813, 0.0, +}; + +// BT.709, which is the standard for HDTV.
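+// Both matrices are laid out column-major for glUniformMatrix3fv: the first three floats weight Y (1.164 = 255/219 expands video-range luma), the next three weight Cb, and the last three weight Cr.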
+const GLfloat kColorConversion709[] = { + 1.164, 1.164, 1.164, + 0.0, -0.213, 2.112, + 1.793, -0.533, 0.0, +}; + +NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D luminanceTexture; + uniform sampler2D chrominanceTexture; + uniform mediump mat3 colorConversionMatrix; + + void main() + { + mediump vec3 yuv; + lowp vec3 rgb; + + yuv.x = texture2D(luminanceTexture, textureCoordinate).r; + yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5); + rgb = colorConversionMatrix * yuv; + + gl_FragColor = vec4(rgb, 1); + } +); + +NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D luminanceTexture; + uniform sampler2D chrominanceTexture; + uniform mediump mat3 colorConversionMatrix; + + void main() + { + mediump vec3 yuv; + lowp vec3 rgb; + + yuv.x = texture2D(luminanceTexture, textureCoordinate).r; + yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5); + rgb = colorConversionMatrix * yuv; + + gl_FragColor = vec4(rgb, 1); + } +); + + +#pragma mark - +#pragma mark Private methods and instance variables + +@interface GPUImageVideoCamera () +{ + AVCaptureDeviceInput *audioInput; + AVCaptureAudioDataOutput *audioOutput; + NSDate *startingCaptureTime; + + dispatch_queue_t cameraProcessingQueue, audioProcessingQueue; + + GLProgram *yuvConversionProgram; + GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute; + GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform; + GLint yuvConversionMatrixUniform; + GLuint yuvConversionFramebuffer; + const GLfloat *_preferredConversion; + + + int imageBufferWidth, imageBufferHeight; + + BOOL addedAudioInputsDueToEncodingTarget; +} + +- (void)updateOrientationSendToTargets; +- (void)convertYUVToRGBOutput; +- (void)setYUVConversionFBO; + +@end + +@implementation GPUImageVideoCamera + +@synthesize captureSessionPreset = _captureSessionPreset; +@synthesize captureSession = _captureSession; +@synthesize inputCamera = _inputCamera; +@synthesize runBenchmark = _runBenchmark; +@synthesize outputImageOrientation = _outputImageOrientation; +@synthesize delegate = _delegate; +@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera; +@synthesize frameRate = _frameRate; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack])) + { + return nil; + } + + return self; +} + +- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition; +{ + if (!(self = [super init])) + { + return nil; + } + + cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0); + audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0); + + frameRenderingSemaphore = dispatch_semaphore_create(1); + + _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above + _runBenchmark = NO; + capturePaused = NO; + outputRotation = kGPUImageNoRotation; + captureAsYUV = YES; + _preferredConversion = kColorConversion709; + + runSynchronouslyOnVideoProcessingQueue(^{ + + if (captureAsYUV) + { + [GPUImageContext useImageProcessingContext]; +// if 
([GPUImageContext deviceSupportsRedTextures]) +// { +// yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString]; +// } +// else +// { + yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString]; +// } + + if (!yuvConversionProgram.initialized) + { + [yuvConversionProgram addAttribute:@"position"]; + [yuvConversionProgram addAttribute:@"inputTextureCoordinate"]; + + if (![yuvConversionProgram link]) + { + NSString *progLog = [yuvConversionProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [yuvConversionProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [yuvConversionProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + yuvConversionProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"]; + yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"]; + yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"]; + yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"]; + yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"]; + + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + + glEnableVertexAttribArray(yuvConversionPositionAttribute); + glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute); + } + + if ([GPUImageContext supportsFastTextureUpload]) + { + [GPUImageContext useImageProcessingContext]; +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#endif + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Need to remove the initially created texture +// [self deleteOutputTexture]; + } + else + { + [self initializeOutputTextureIfNeeded]; + } + }); + + // Grab the back-facing or front-facing camera + _inputCamera = nil; + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in devices) + { + if ([device position] == cameraPosition) + { + _inputCamera = device; + } + } + + if (!_inputCamera) { + return nil; + } + + // Create the capture session + _captureSession = [[AVCaptureSession alloc] init]; + + [_captureSession beginConfiguration]; + + // Add the video input + NSError *error = nil; + videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error]; + if ([_captureSession canAddInput:videoInput]) + { + [_captureSession addInput:videoInput]; + } + + // Add the video frame output + videoOutput = [[AVCaptureVideoDataOutput alloc] init]; + [videoOutput setAlwaysDiscardsLateVideoFrames:NO]; + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + if (captureAsYUV && [GPUImageContext supportsFastTextureUpload]) + { + BOOL supportsFullYUVRange = NO; + 
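+        // Scan the pixel formats this output can deliver for full-range YUV (0-255 luma); otherwise fall back to video-range (16-235) biplanar YUV below.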
NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes; + for (NSNumber *currentPixelFormat in supportedPixelFormats) + { + if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) + { + supportsFullYUVRange = YES; + } + } + + if (supportsFullYUVRange) + { + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + else + { + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + } + else + { + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + + [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue]; + if ([_captureSession canAddOutput:videoOutput]) + { + [_captureSession addOutput:videoOutput]; + } + else + { + NSLog(@"Couldn't add video output"); + return nil; + } + + _captureSessionPreset = sessionPreset; + [_captureSession setSessionPreset:_captureSessionPreset]; + +// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset +// AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo]; +// +// if (conn.supportsVideoMinFrameDuration) +// conn.videoMinFrameDuration = CMTimeMake(1,60); +// if (conn.supportsVideoMaxFrameDuration) +// conn.videoMaxFrameDuration = CMTimeMake(1,60); + + [_captureSession commitConfiguration]; + + return self; +} + +- (void)dealloc +{ + [self stopCameraCapture]; + [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()]; + [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()]; + + [self removeInputsAndOutputs]; + + if ([GPUImageContext supportsFastTextureUpload]) + { + CFRelease(coreVideoTextureCache); + } + +// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required. 
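+// (With a deployment target below iOS 6.0, GCD objects are not managed by ARC and the semaphore must be released manually; from 6.0 on, ARC manages them and dispatch_release() is disallowed.)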
+#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) ) + if (frameRenderingSemaphore != NULL) + { + dispatch_release(frameRenderingSemaphore); + } +#endif + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + if (captureAsYUV && [GPUImageContext supportsFastTextureUpload]) + { + [self destroyYUVConversionFBO]; + } +} + +- (BOOL)addAudioInputsAndOutputs +{ + if (audioOutput) + return NO; + + [_captureSession beginConfiguration]; + + _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; + audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil]; + if ([_captureSession canAddInput:audioInput]) + { + [_captureSession addInput:audioInput]; + } + audioOutput = [[AVCaptureAudioDataOutput alloc] init]; + + if ([_captureSession canAddOutput:audioOutput]) + { + [_captureSession addOutput:audioOutput]; + } + else + { + NSLog(@"Couldn't add audio output"); + } + [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue]; + + [_captureSession commitConfiguration]; + return YES; +} + +- (BOOL)removeAudioInputsAndOutputs +{ + if (!audioOutput) + return NO; + + [_captureSession beginConfiguration]; + [_captureSession removeInput:audioInput]; + [_captureSession removeOutput:audioOutput]; + audioInput = nil; + audioOutput = nil; + _microphone = nil; + [_captureSession commitConfiguration]; + return YES; +} + +- (void)removeInputsAndOutputs; +{ + [_captureSession beginConfiguration]; + if (videoInput) { + [_captureSession removeInput:videoInput]; + [_captureSession removeOutput:videoOutput]; + videoInput = nil; + videoOutput = nil; + } + if (_microphone != nil) + { + [_captureSession removeInput:audioInput]; + [_captureSession removeOutput:audioOutput]; + audioInput = nil; + audioOutput = nil; + _microphone = nil; + } + [_captureSession commitConfiguration]; +} + +#pragma mark - +#pragma mark Managing targets + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [super addTarget:newTarget atTextureLocation:textureLocation]; + + [newTarget setInputRotation:outputRotation atIndex:textureLocation]; +} + +#pragma mark - +#pragma mark Manage the camera video stream + +- (void)startCameraCapture; +{ + if (![_captureSession isRunning]) + { + startingCaptureTime = [NSDate date]; + [_captureSession startRunning]; + }; +} + +- (void)stopCameraCapture; +{ + if ([_captureSession isRunning]) + { + [_captureSession stopRunning]; + } +} + +- (void)pauseCameraCapture; +{ + capturePaused = YES; +} + +- (void)resumeCameraCapture; +{ + capturePaused = NO; +} + +- (void)rotateCamera +{ + if (self.frontFacingCameraPresent == NO) + return; + + NSError *error; + AVCaptureDeviceInput *newVideoInput; + AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position]; + + if (currentCameraPosition == AVCaptureDevicePositionBack) + { + currentCameraPosition = AVCaptureDevicePositionFront; + } + else + { + currentCameraPosition = AVCaptureDevicePositionBack; + } + + AVCaptureDevice *backFacingCamera = nil; + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in devices) + { + if ([device position] == currentCameraPosition) + { + backFacingCamera = device; + } + } + newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error]; + + if (newVideoInput != nil) + { + [_captureSession beginConfiguration]; + + [_captureSession removeInput:videoInput]; + if ([_captureSession canAddInput:newVideoInput]) + { + 
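+        // Swap in the new camera's input; if the session rejects it, the original input is restored below so capture keeps running.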
[_captureSession addInput:newVideoInput]; + videoInput = newVideoInput; + } + else + { + [_captureSession addInput:videoInput]; + } + //captureSession.sessionPreset = oriPreset; + [_captureSession commitConfiguration]; + } + + _inputCamera = backFacingCamera; + [self setOutputImageOrientation:_outputImageOrientation]; +} + +- (AVCaptureDevicePosition)cameraPosition +{ + return [[videoInput device] position]; +} + ++ (BOOL)isBackFacingCameraPresent; +{ + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + for (AVCaptureDevice *device in devices) + { + if ([device position] == AVCaptureDevicePositionBack) + return YES; + } + + return NO; +} + +- (BOOL)isBackFacingCameraPresent +{ + return [GPUImageVideoCamera isBackFacingCameraPresent]; +} + ++ (BOOL)isFrontFacingCameraPresent; +{ + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + for (AVCaptureDevice *device in devices) + { + if ([device position] == AVCaptureDevicePositionFront) + return YES; + } + + return NO; +} + +- (BOOL)isFrontFacingCameraPresent +{ + return [GPUImageVideoCamera isFrontFacingCameraPresent]; +} + +- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset; +{ + [_captureSession beginConfiguration]; + + _captureSessionPreset = captureSessionPreset; + [_captureSession setSessionPreset:_captureSessionPreset]; + + [_captureSession commitConfiguration]; +} + +- (void)setFrameRate:(int32_t)frameRate; +{ + _frameRate = frameRate; + + if (_frameRate > 0) + { + if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] && + [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) { + + NSError *error; + [_inputCamera lockForConfiguration:&error]; + if (error == nil) { +#if defined(__IPHONE_7_0) + [_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)]; + [_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)]; +#endif + } + [_inputCamera unlockForConfiguration]; + + } else { + + for (AVCaptureConnection *connection in videoOutput.connections) + { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)]) + connection.videoMinFrameDuration = CMTimeMake(1, _frameRate); + + if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)]) + connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate); +#pragma clang diagnostic pop + } + } + + } + else + { + if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] && + [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) { + + NSError *error; + [_inputCamera lockForConfiguration:&error]; + if (error == nil) { +#if defined(__IPHONE_7_0) + [_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid]; + [_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid]; +#endif + } + [_inputCamera unlockForConfiguration]; + + } else { + + for (AVCaptureConnection *connection in videoOutput.connections) + { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)]) + connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default + + if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)]) + connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default +#pragma clang diagnostic pop + } + } 
+ + } +} + +- (int32_t)frameRate; +{ + return _frameRate; +} + +- (AVCaptureConnection *)videoCaptureConnection { + for (AVCaptureConnection *connection in [videoOutput connections] ) { + for ( AVCaptureInputPort *port in [connection inputPorts] ) { + if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) { + return connection; + } + } + } + + return nil; +} + +#define INITIALFRAMESTOIGNOREFORBENCHMARK 5 + +- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime; +{ + for (id currentTarget in targets) + { + if ([currentTarget enabled]) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + if (currentTarget != self.targetToIgnoreForUpdates) + { + [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget]; + [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget]; + + if ([currentTarget wantsMonochromeInput] && captureAsYUV) + { + [currentTarget setCurrentlyReceivingMonochromeInput:YES]; + [currentTarget setInputTexture:luminanceTexture atIndex:textureIndexOfTarget]; + } + else + { + [currentTarget setCurrentlyReceivingMonochromeInput:NO]; + [currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget]; + } + + [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget]; + } + else + { + [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget]; + [currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget]; + } + } + } +} + +- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; +{ + if (capturePaused) + { + return; + } + + CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer); + int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame); + int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame); + CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL); + if (colorAttachments == kCVImageBufferYCbCrMatrix_ITU_R_601_4) { + _preferredConversion = kColorConversion601; + } + else { + _preferredConversion = kColorConversion709; + } + + CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + + [GPUImageContext useImageProcessingContext]; + + if ([GPUImageContext supportsFastTextureUpload]) + { + CVOpenGLESTextureRef luminanceTextureRef = NULL; + CVOpenGLESTextureRef chrominanceTextureRef = NULL; + CVOpenGLESTextureRef texture = NULL; + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion + { + + if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) ) + { + imageBufferWidth = bufferWidth; + imageBufferHeight = bufferHeight; + + [self destroyYUVConversionFBO]; + [self createYUVConversionFBO]; + } + + CVReturn err; + // Y-plane + glActiveTexture(GL_TEXTURE4); + if ([GPUImageContext deviceSupportsRedTextures]) + { +// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, 
bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); + } + else + { + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); + } + if (err) + { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef); + glBindTexture(GL_TEXTURE_2D, luminanceTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + // UV-plane + glActiveTexture(GL_TEXTURE5); + if ([GPUImageContext deviceSupportsRedTextures]) + { +// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef); + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef); + } + else + { + err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef); + } + if (err) + { + NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); + } + + chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef); + glBindTexture(GL_TEXTURE_2D, chrominanceTexture); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + if (!allTargetsWantMonochromeData) + { + [self convertYUVToRGBOutput]; + } + + [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime]; + + CVPixelBufferUnlockBaseAddress(cameraFrame, 0); + CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0); + CFRelease(luminanceTextureRef); + CFRelease(chrominanceTextureRef); + } + else + { + CVPixelBufferLockBaseAddress(cameraFrame, 0); + + CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture); + + if (!texture || err) { + NSLog(@"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err); + NSAssert(NO, @"Camera failure"); + return; + } + + outputTexture = CVOpenGLESTextureGetName(texture); + // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime]; + + CVPixelBufferUnlockBaseAddress(cameraFrame, 0); + CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0); + CFRelease(texture); + + outputTexture = 0; + } + + + if (_runBenchmark) + { + numberOfFramesCaptured++; + if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK) + 
{ + CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + totalFrameTimeDuringCapture += currentFrameTime; + NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]); + NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime); + } + } + } + else + { + CVPixelBufferLockBaseAddress(cameraFrame, 0); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + + // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); + + // Using BGRA extension to pull in video frame data directly + // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera + int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); + + for (id currentTarget in targets) + { + if ([currentTarget enabled]) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget]; + } + } + } + + CVPixelBufferUnlockBaseAddress(cameraFrame, 0); + + if (_runBenchmark) + { + numberOfFramesCaptured++; + if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK) + { + CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + totalFrameTimeDuringCapture += currentFrameTime; + } + } + } +} + +- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; +{ + [self.audioEncodingTarget processAudioBuffer:sampleBuffer]; +} + +- (void)convertYUVToRGBOutput; +{ + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + [self setYUVConversionFBO]; + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat textureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, luminanceTexture); + glUniform1i(yuvConversionLuminanceTextureUniform, 4); + + glActiveTexture(GL_TEXTURE5); + glBindTexture(GL_TEXTURE_2D, chrominanceTexture); + glUniform1i(yuvConversionChrominanceTextureUniform, 5); + + glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion); + + glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)setYUVConversionFBO; +{ + if (!yuvConversionFramebuffer) + { + [self createYUVConversionFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glViewport(0, 0, imageBufferWidth, imageBufferHeight); +} + +- (void)createYUVConversionFBO; +{ + [self initializeOutputTextureIfNeeded]; + + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &yuvConversionFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 
imageBufferWidth, imageBufferHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + [self notifyTargetsAboutNewOutputTexture]; + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + glBindTexture(GL_TEXTURE_2D, 0); + +} + +- (void)destroyYUVConversionFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (yuvConversionFramebuffer) + { + glDeleteFramebuffers(1, &yuvConversionFramebuffer); + yuvConversionFramebuffer = 0; + } + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } + }); +} + + +#pragma mark - +#pragma mark Benchmarking + +- (CGFloat)averageFrameDurationDuringCapture; +{ + return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0; +} + +- (void)resetBenchmarkAverage; +{ + numberOfFramesCaptured = 0; + totalFrameTimeDuringCapture = 0.0; +} + +#pragma mark - +#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate + +- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection +{ + if (!self.captureSession.isRunning) + { + return; + } + else if (captureOutput == audioOutput) + { + [self processAudioSampleBuffer:sampleBuffer]; + } + else + { + if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) + { + return; + } + + CFRetain(sampleBuffer); + runAsynchronouslyOnVideoProcessingQueue(^{ + //Feature Detection Hook. + if (self.delegate) + { + [self.delegate willOutputSampleBuffer:sampleBuffer]; + } + + [self processVideoSampleBuffer:sampleBuffer]; + + CFRelease(sampleBuffer); + dispatch_semaphore_signal(frameRenderingSemaphore); + }); + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue; +{ + if (newValue) { + /* Add audio inputs and outputs, if necessary */ + addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs]; + } else if (addedAudioInputsDueToEncodingTarget) { + /* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */ + [self removeAudioInputsAndOutputs]; + addedAudioInputsDueToEncodingTarget = NO; + } + + [super setAudioEncodingTarget:newValue]; +} + +- (void)updateOrientationSendToTargets; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + + // From the iOS 5.0 release notes: + // In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight. 
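+    // The switches below map the app's interface orientation to a rotation mode that corrects the camera's native landscape buffer orientation, folding in any requested mirroring.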
+ + if ([self cameraPosition] == AVCaptureDevicePositionBack) + { + if (_horizontallyMirrorRearFacingCamera) + { + switch(_outputImageOrientation) + { + case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break; + case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break; + case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break; + case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break; + } + } + else + { + switch(_outputImageOrientation) + { + case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break; + case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break; + case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break; + case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break; + } + } + } + else + { + if (_horizontallyMirrorFrontFacingCamera) + { + switch(_outputImageOrientation) + { + case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break; + case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break; + case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break; + case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break; + } + } + else + { + switch(_outputImageOrientation) + { + case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break; + case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break; + case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break; + case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break; + } + } + } + + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]]; + } + }); +} + +- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue; +{ + _outputImageOrientation = newValue; + [self updateOrientationSendToTargets]; +} + +- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue +{ + _horizontallyMirrorFrontFacingCamera = newValue; + [self updateOrientationSendToTargets]; +} + +- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue +{ + _horizontallyMirrorRearFacingCamera = newValue; + [self updateOrientationSendToTargets]; +} + +@end diff --git a/GPUImage/Source/GPUImageVignetteFilter.h b/GPUImage/Source/GPUImageVignetteFilter.h new file mode 100755 index 0000000..37be944 --- /dev/null +++ b/GPUImage/Source/GPUImageVignetteFilter.h @@ -0,0 +1,22 @@ +#import "GPUImageFilter.h" + +/** Performs a vignetting effect, fading out the image at the edges + */ +@interface GPUImageVignetteFilter : GPUImageFilter +{ + GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform; +} + +// The center for the vignette in tex coords (defaults to 0.5, 0.5) +@property (nonatomic, readwrite) CGPoint vignetteCenter; + +// The color to use for the vignette (defaults to black) +@property (nonatomic, readwrite) GPUVector3 vignetteColor; + +// The normalized distance from the center where the vignette effect starts. Default of 0.3. +@property (nonatomic, readwrite) CGFloat vignetteStart; + +// The normalized distance from the center where the vignette effect ends.
Default of 0.75. +@property (nonatomic, readwrite) CGFloat vignetteEnd; + +@end diff --git a/GPUImage/Source/GPUImageVignetteFilter.m b/GPUImage/Source/GPUImageVignetteFilter.m new file mode 100755 index 0000000..6e1eadb --- /dev/null +++ b/GPUImage/Source/GPUImageVignetteFilter.m @@ -0,0 +1,104 @@ +#import "GPUImageVignetteFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + varying highp vec2 textureCoordinate; + + uniform lowp vec2 vignetteCenter; + uniform lowp vec3 vignetteColor; + uniform highp float vignetteStart; + uniform highp float vignetteEnd; + + void main() + { + lowp vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate); + lowp float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y)); + lowp float percent = smoothstep(vignetteStart, vignetteEnd, d); + gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a); + } +); +#else +NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + varying vec2 textureCoordinate; + + uniform vec2 vignetteCenter; + uniform vec3 vignetteColor; + uniform float vignetteStart; + uniform float vignetteEnd; + + void main() + { + vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate); + float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y)); + float percent = smoothstep(vignetteStart, vignetteEnd, d); + gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a); + } +); +#endif + +@implementation GPUImageVignetteFilter + +@synthesize vignetteCenter = _vignetteCenter; +@synthesize vignetteColor = _vignetteColor; +@synthesize vignetteStart =_vignetteStart; +@synthesize vignetteEnd = _vignetteEnd; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageVignetteFragmentShaderString])) + { + return nil; + } + + vignetteCenterUniform = [filterProgram uniformIndex:@"vignetteCenter"]; + vignetteColorUniform = [filterProgram uniformIndex:@"vignetteColor"]; + vignetteStartUniform = [filterProgram uniformIndex:@"vignetteStart"]; + vignetteEndUniform = [filterProgram uniformIndex:@"vignetteEnd"]; + + self.vignetteCenter = (CGPoint){ 0.5f, 0.5f }; + self.vignetteColor = (GPUVector3){ 0.0f, 0.0f, 0.0f }; + self.vignetteStart = 0.3; + self.vignetteEnd = 0.75; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setVignetteCenter:(CGPoint)newValue +{ + _vignetteCenter = newValue; + + [self setPoint:newValue forUniform:vignetteCenterUniform program:filterProgram]; +} + +- (void)setVignetteColor:(GPUVector3)newValue +{ + _vignetteColor = newValue; + + [self setVec3:newValue forUniform:vignetteColorUniform program:filterProgram]; +} + +- (void)setVignetteStart:(CGFloat)newValue; +{ + _vignetteStart = newValue; + + [self setFloat:_vignetteStart forUniform:vignetteStartUniform program:filterProgram]; +} + +- (void)setVignetteEnd:(CGFloat)newValue; +{ + _vignetteEnd = newValue; + + [self setFloat:_vignetteEnd forUniform:vignetteEndUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageVoronoiConsumerFilter.h b/GPUImage/Source/GPUImageVoronoiConsumerFilter.h new file mode 100644 index 0000000..659e39d --- /dev/null +++ b/GPUImage/Source/GPUImageVoronoiConsumerFilter.h @@ -0,0 +1,10 @@ +#import "GPUImageTwoInputFilter.h" + 
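+// Samples colors through a Voronoi map: input texture 2 encodes, for each pixel, the coordinate of its nearest seed point (unpacked by getCoordFromColor in the shader), and that coordinate is used to look up a color in input texture 1.
+//
+// A minimal usage sketch (assuming the companion GPUImageJFAVoronoiFilter as the map generator; the source object names are placeholders):
+//
+//   GPUImageJFAVoronoiFilter *voronoiMap = [[GPUImageJFAVoronoiFilter alloc] init];
+//   GPUImageVoronoiConsumerFilter *consumer = [[GPUImageVoronoiConsumerFilter alloc] init];
+//   consumer.sizeInPixels = CGSizeMake(1024.0, 1024.0); // must be square and a power of two
+//   [colorSource addTarget:consumer];  // texture 1: colors to sample
+//   [voronoiMap addTarget:consumer];   // texture 2: encoded seed coordinates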
+@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter +{ + GLint sizeUniform; +} + +@property (nonatomic, readwrite) CGSize sizeInPixels; + +@end diff --git a/GPUImage/Source/GPUImageVoronoiConsumerFilter.m b/GPUImage/Source/GPUImageVoronoiConsumerFilter.m new file mode 100644 index 0000000..c12c34f --- /dev/null +++ b/GPUImage/Source/GPUImageVoronoiConsumerFilter.m @@ -0,0 +1,94 @@ +#import "GPUImageVoronoiConsumerFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING +( + + precision highp float; + + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + uniform vec2 size; + varying vec2 textureCoordinate; + + vec2 getCoordFromColor(vec4 color) +{ + float z = color.z * 256.0; + float yoff = floor(z / 8.0); + float xoff = mod(z, 8.0); + float x = color.x*256.0 + xoff*256.0; + float y = color.y*256.0 + yoff*256.0; + return vec2(x,y) / size; +} + + void main(void) { + vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate); + vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc)); + + gl_FragColor = color; + } +); +#else +NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + uniform sampler2D inputImageTexture2; + uniform vec2 size; + varying vec2 textureCoordinate; + + vec2 getCoordFromColor(vec4 color) + { + float z = color.z * 256.0; + float yoff = floor(z / 8.0); + float xoff = mod(z, 8.0); + float x = color.x*256.0 + xoff*256.0; + float y = color.y*256.0 + yoff*256.0; + return vec2(x,y) / size; + } + + void main(void) + { + vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate); + vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc)); + + gl_FragColor = color; + } +); +#endif + +@implementation GPUImageVoronoiConsumerFilter + +@synthesize sizeInPixels = _sizeInPixels; + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageVoronoiConsumerFragmentShaderString])) + { + return nil; + } + + sizeUniform = [filterProgram uniformIndex:@"size"]; + + return self; +} + +-(void)setSizeInPixels:(CGSize)sizeInPixels { + _sizeInPixels = sizeInPixels; + + //validate that it's a power of 2 and square + + float width = log2(sizeInPixels.width); + float height = log2(sizeInPixels.height); + + if (width != height) { + NSLog(@"Voronoi point texture must be square"); + return; + } + if (width != floor(width) || height != floor(height)) { + NSLog(@"Voronoi point texture must be a power of 2. 
Texture size %f, %f", sizeInPixels.width, sizeInPixels.height); + return; + } + glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height); +} + +@end diff --git a/GPUImage/Source/GPUImageWeakPixelInclusionFilter.h b/GPUImage/Source/GPUImageWeakPixelInclusionFilter.h new file mode 100644 index 0000000..44b76c6 --- /dev/null +++ b/GPUImage/Source/GPUImageWeakPixelInclusionFilter.h @@ -0,0 +1,5 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter + +@end diff --git a/GPUImage/Source/GPUImageWeakPixelInclusionFilter.m b/GPUImage/Source/GPUImageWeakPixelInclusionFilter.m new file mode 100644 index 0000000..4e95ad5 --- /dev/null +++ b/GPUImage/Source/GPUImageWeakPixelInclusionFilter.m @@ -0,0 +1,94 @@ +#import "GPUImageWeakPixelInclusionFilter.h" + +@implementation GPUImageWeakPixelInclusionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING +( + precision lowp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r; + + float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity; + float sumTest = step(1.5, pixelIntensitySum); + float pixelTest = step(0.01, centerIntensity); + + gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0); + } +); +#else +NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float 
rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r; + + float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity; + float sumTest = step(1.5, pixelIntensitySum); + float pixelTest = step(0.01, centerIntensity); + + gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageWeakPixelInclusionFragmentShaderString])) + { + return nil; + } + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageWhiteBalanceFilter.h b/GPUImage/Source/GPUImageWhiteBalanceFilter.h new file mode 100644 index 0000000..eafd65a --- /dev/null +++ b/GPUImage/Source/GPUImageWhiteBalanceFilter.h @@ -0,0 +1,17 @@ +#import "GPUImageFilter.h" +/** + * Created by Alaric Cole + * Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it. + + */ +@interface GPUImageWhiteBalanceFilter : GPUImageFilter +{ + GLint temperatureUniform, tintUniform; +} +//choose color temperature, in degrees Kelvin +@property(readwrite, nonatomic) int temperature; + +//adjust tint to compensate +@property(readwrite, nonatomic) int tint; + +@end diff --git a/GPUImage/Source/GPUImageWhiteBalanceFilter.m b/GPUImage/Source/GPUImageWhiteBalanceFilter.m new file mode 100644 index 0000000..af4b4ba --- /dev/null +++ b/GPUImage/Source/GPUImageWhiteBalanceFilter.m @@ -0,0 +1,107 @@ +#import "GPUImageWhiteBalanceFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING +( +uniform sampler2D inputImageTexture; +varying highp vec2 textureCoordinate; + +uniform lowp float temperature; +uniform lowp float tint; + +const lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0); + +const mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311); +const mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702); + +void main() +{ + lowp vec4 source = texture2D(inputImageTexture, textureCoordinate); + + mediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint + yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226); + lowp vec3 rgb = YIQtoRGB * yiq; + + lowp vec3 processed = vec3( + (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature + (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))), + (rgb.b < 0.5 ? 
(2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b)))); + + gl_FragColor = vec4(mix(rgb, processed, temperature), source.a); +} +); +#else +NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING +( + uniform sampler2D inputImageTexture; + varying vec2 textureCoordinate; + + uniform float temperature; + uniform float tint; + + const vec3 warmFilter = vec3(0.93, 0.54, 0.0); + + const mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311); + const mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702); + + void main() +{ + vec4 source = texture2D(inputImageTexture, textureCoordinate); + + vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint + yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226); + vec3 rgb = YIQtoRGB * yiq; + + vec3 processed = vec3( + (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature + (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))), + (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b)))); + + gl_FragColor = vec4(mix(rgb, processed, temperature), source.a); +} +); +#endif + +@implementation GPUImageWhiteBalanceFilter + +@synthesize temperature = _temperature; +@synthesize tint = _tint; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageWhiteBalanceFragmentShaderString])) + { + return nil; + } + + temperatureUniform = [filterProgram uniformIndex:@"temperature"]; + tintUniform = [filterProgram uniformIndex:@"tint"]; + + self.temperature = 5000.0; + self.tint = 0.0; + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setTemperature:(int)newValue; +{ + _temperature = newValue; + + [self setFloat:_temperature < 5000 ? 0.0004 * (float)(_temperature-5000.0) : 0.00006 * (float)(_temperature-5000.0) forUniform:temperatureUniform program:filterProgram]; +} + +- (void)setTint:(int)newValue; +{ + _tint = newValue; + + [self setFloat:(float)(_tint) / 100.0 forUniform:tintUniform program:filterProgram]; +} + +@end + diff --git a/GPUImage/Source/GPUImageXYDerivativeFilter.h b/GPUImage/Source/GPUImageXYDerivativeFilter.h new file mode 100755 index 0000000..8db5745 --- /dev/null +++ b/GPUImage/Source/GPUImageXYDerivativeFilter.h @@ -0,0 +1,5 @@ +#import "GPUImageSobelEdgeDetectionFilter.h" + +@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter + +@end diff --git a/GPUImage/Source/GPUImageXYDerivativeFilter.m b/GPUImage/Source/GPUImageXYDerivativeFilter.m new file mode 100755 index 0000000..7e19e9d --- /dev/null +++ b/GPUImage/Source/GPUImageXYDerivativeFilter.m @@ -0,0 +1,106 @@ +#import "GPUImageXYDerivativeFilter.h" + +// I'm using the Prewitt operator to obtain the derivative, then squaring the X and Y components and placing the product of the two in Z. +// In tests, Prewitt seemed to be tied with Sobel for the best, and it's just a little cheaper to compute. +// This is primarily intended to be used with corner detection filters. 
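+// For reference, these are the two 3x3 Prewitt kernels the shaders below implement (rows run top to bottom):
+//
+//    horizontal: [ -1  0  1 ]    vertical: [ -1 -1 -1 ]
+//                [ -1  0  1 ]              [  0  0  0 ]
+//                [ -1  0  1 ]              [  1  1  1 ]
+//
+// The blue channel packs the signed product of the two derivatives as (GX * GY + 1.0) / 2.0, so a downstream corner detector can recover it as 2.0 * z - 1.0.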
+ +@implementation GPUImageXYDerivativeFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING +( + precision highp float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float edgeStrength; + + void main() + { + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + + float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity; + float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity; + verticalDerivative = verticalDerivative * edgeStrength; + horizontalDerivative = horizontalDerivative * edgeStrength; + + // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. 
This will be expanded in the corner detection filter + gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0); + } +); +#else +NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform float edgeStrength; + + void main() + { + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + + float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity; + float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity; + verticalDerivative = verticalDerivative * edgeStrength; + horizontalDerivative = horizontalDerivative * edgeStrength; + + // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter + gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [self initWithFragmentShaderFromString:kGPUImageGradientFragmentShaderString])) + { + return nil; + } + + self.edgeStrength = 1.0; + + return self; +} + +@end diff --git a/GPUImage/Source/GPUImageZoomBlurFilter.h b/GPUImage/Source/GPUImageZoomBlurFilter.h new file mode 100644 index 0000000..744a72c --- /dev/null +++ b/GPUImage/Source/GPUImageZoomBlurFilter.h @@ -0,0 +1,13 @@ +#import "GPUImageFilter.h" + +@interface GPUImageZoomBlurFilter : GPUImageFilter + +/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0 + */ +@property (readwrite, nonatomic) CGFloat blurSize; + +/** The normalized center of the blur. 
(0.5, 0.5) by default + */ +@property (readwrite, nonatomic) CGPoint blurCenter; + +@end diff --git a/GPUImage/Source/GPUImageZoomBlurFilter.m b/GPUImage/Source/GPUImageZoomBlurFilter.m new file mode 100644 index 0000000..2ae8493 --- /dev/null +++ b/GPUImage/Source/GPUImageZoomBlurFilter.m @@ -0,0 +1,115 @@ +#import "GPUImageZoomBlurFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform highp vec2 blurCenter; + uniform highp float blurSize; + + void main() + { + // TODO: Do a more intelligent scaling based on resolution here + highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize; + + lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) * 0.12; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) * 0.12; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05; + + gl_FragColor = fragmentColor; + } +); +#else +NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + uniform vec2 blurCenter; + uniform float blurSize; + + void main() + { + // TODO: Do a more intelligent scaling based on resolution here + vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize; + + vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) * 0.12; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09; + fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) * 0.12; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09; + fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05; + + gl_FragColor = fragmentColor; + } +); +#endif + +@interface GPUImageZoomBlurFilter() +{ + GLint blurSizeUniform, blurCenterUniform; +} +@end + +@implementation GPUImageZoomBlurFilter + +@synthesize blurSize = _blurSize; +@synthesize blurCenter = _blurCenter; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageZoomBlurFragmentShaderString])) + { + return nil; + } + + blurSizeUniform = [filterProgram uniformIndex:@"blurSize"]; + blurCenterUniform = [filterProgram uniformIndex:@"blurCenter"]; + + self.blurSize = 
1.0; + self.blurCenter = CGPointMake(0.5, 0.5); + + return self; +} + +#pragma mark - +#pragma mark Accessors + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + [super setInputRotation:newInputRotation atIndex:textureIndex]; + [self setBlurCenter:self.blurCenter]; +} + +- (void)setBlurSize:(CGFloat)newValue; +{ + _blurSize = newValue; + + [self setFloat:_blurSize forUniform:blurSizeUniform program:filterProgram]; +} + +- (void)setBlurCenter:(CGPoint)newValue; +{ + _blurCenter = newValue; + + CGPoint rotatedPoint = [self rotatedPoint:_blurCenter forRotation:inputRotation]; + [self setPoint:rotatedPoint forUniform:blurCenterUniform program:filterProgram]; +} + +@end diff --git a/GPUImage/Source/GPUImageiOSBlurFilter.h b/GPUImage/Source/GPUImageiOSBlurFilter.h new file mode 100644 index 0000000..da1c566 --- /dev/null +++ b/GPUImage/Source/GPUImageiOSBlurFilter.h @@ -0,0 +1,26 @@ +#import "GPUImageFilterGroup.h" + +@class GPUImageSaturationFilter; +@class GPUImageGaussianBlurFilter; +@class GPUImageLuminanceRangeFilter; + +@interface GPUImageiOSBlurFilter : GPUImageFilterGroup +{ + GPUImageSaturationFilter *saturationFilter; + GPUImageGaussianBlurFilter *blurFilter; + GPUImageLuminanceRangeFilter *luminanceRangeFilter; +} + +/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function. + */ +@property (readwrite, nonatomic) CGFloat blurRadiusInPixels; + +/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level + */ +@property (readwrite, nonatomic) CGFloat saturation; + +/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0 + */ +@property (readwrite, nonatomic) CGFloat downsampling; + +@end diff --git a/GPUImage/Source/GPUImageiOSBlurFilter.m b/GPUImage/Source/GPUImageiOSBlurFilter.m new file mode 100644 index 0000000..ed31e0d --- /dev/null +++ b/GPUImage/Source/GPUImageiOSBlurFilter.m @@ -0,0 +1,103 @@ +#import "GPUImageiOSBlurFilter.h" +#import "GPUImageSaturationFilter.h" +#import "GPUImageGaussianBlurFilter.h" +#import "GPUImageLuminanceRangeFilter.h" + +@implementation GPUImageiOSBlurFilter + +@synthesize blurRadiusInPixels; +@synthesize saturation; +@synthesize downsampling = _downsampling; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + // First pass: downsample and desaturate + saturationFilter = [[GPUImageSaturationFilter alloc] init]; + [self addFilter:saturationFilter]; + + // Second pass: apply a strong Gaussian blur + blurFilter = [[GPUImageGaussianBlurFilter alloc] init]; + [self addFilter:blurFilter]; + + // Third pass: upsample and adjust luminance range + luminanceRangeFilter = [[GPUImageLuminanceRangeFilter alloc] init]; + [self addFilter:luminanceRangeFilter]; + + [saturationFilter addTarget:blurFilter]; + [blurFilter addTarget:luminanceRangeFilter]; + + self.initialFilters = [NSArray arrayWithObject:saturationFilter]; + self.terminalFilter = luminanceRangeFilter; + + self.blurRadiusInPixels = 12.0; + self.saturation = 0.8; + self.downsampling = 4.0; + + return self; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + if (_downsampling > 1.0) + { + CGSize rotatedSize = [saturationFilter rotatedSize:newSize forIndex:textureIndex]; + + [saturationFilter forceProcessingAtSize:CGSizeMake(rotatedSize.width / 
_downsampling, rotatedSize.height / _downsampling)]; + [luminanceRangeFilter forceProcessingAtSize:rotatedSize]; + } + + [super setInputSize:newSize atIndex:textureIndex]; +} + +#pragma mark - +#pragma mark Accessors + +// From Apple's UIImage+ImageEffects category: + +// A description of how to compute the box kernel width from the Gaussian +// radius (aka standard deviation) appears in the SVG spec: +// http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement +// +// For larger values of 's' (s >= 2.0), an approximation can be used: Three +// successive box-blurs build a piece-wise quadratic convolution kernel, which +// approximates the Gaussian kernel to within roughly 3%. +// +// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5) +// +// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel. + + +- (void)setBlurRadiusInPixels:(CGFloat)newValue; +{ + blurFilter.blurRadiusInPixels = newValue; +} + +- (CGFloat)blurRadiusInPixels; +{ + return blurFilter.blurRadiusInPixels; +} + +- (void)setSaturation:(CGFloat)newValue; +{ + saturationFilter.saturation = newValue; +} + +- (CGFloat)saturation; +{ + return saturationFilter.saturation; +} + +- (void)setDownsampling:(CGFloat)newValue; +{ + _downsampling = newValue; +} + +@end diff --git a/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.h b/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.h new file mode 100644 index 0000000..3aca746 --- /dev/null +++ b/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.h @@ -0,0 +1,5 @@ +#import "GPUImage3x3TextureSamplingFilter.h" + +@interface GPUimageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter + +@end diff --git a/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.m b/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.m new file mode 100644 index 0000000..6844bbe --- /dev/null +++ b/GPUImage/Source/GPUimageDirectionalSobelEdgeDetectionFilter.m @@ -0,0 +1,103 @@ +#import "GPUimageDirectionalSobelEdgeDetectionFilter.h" + +@implementation GPUimageDirectionalSobelEdgeDetectionFilter + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING +( + precision mediump float; + + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + + vec2 gradientDirection; + gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + 
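// The line above applies the horizontal Sobel kernel (note the 2.0 center weights that distinguish Sobel from the Prewitt operator used in GPUImageXYDerivativeFilter); its transpose below yields the vertical component. +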
gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + + float gradientMagnitude = length(gradientDirection); + vec2 normalizedDirection = normalize(gradientDirection); + normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away + normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0 + + gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0); + } +); +#else +NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + varying vec2 leftTextureCoordinate; + varying vec2 rightTextureCoordinate; + + varying vec2 topTextureCoordinate; + varying vec2 topLeftTextureCoordinate; + varying vec2 topRightTextureCoordinate; + + varying vec2 bottomTextureCoordinate; + varying vec2 bottomLeftTextureCoordinate; + varying vec2 bottomRightTextureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r; + float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r; + float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r; + float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r; + float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r; + float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r; + float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r; + float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r; + + vec2 gradientDirection; + gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity; + gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity; + + float gradientMagnitude = length(gradientDirection); + vec2 normalizedDirection = normalize(gradientDirection); + normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away + normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0 + + gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0); + } +); +#endif + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)init; +{ + if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString])) + { + return nil; + } + + return self; +} + + +@end diff --git a/GPUImage/Source/Mac/GPUImage.h b/GPUImage/Source/Mac/GPUImage.h new file mode 100755 index 0000000..41176dc --- /dev/null +++ b/GPUImage/Source/Mac/GPUImage.h @@ -0,0 +1,155 @@ +#import + +// Base classes +#import +#import + +// Sources +#import +#import +#import +#import +#import + +// Filters +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import +#import 
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import +#import +#import +#import
+#import +#import +#import +#import +#import +#import +#import + +// Outputs +#import +#import diff --git a/GPUImage/Source/Mac/GPUImageAVCamera.h b/GPUImage/Source/Mac/GPUImageAVCamera.h new file mode 100755 index 0000000..54c1a2f --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageAVCamera.h @@ -0,0 +1,132 @@ +#import +#import +#import +#import "GPUImageContext.h" +#import "GPUImageOutput.h" + +// Delegate protocol for face detection. +@protocol GPUImageVideoCameraDelegate + +@optional +- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer; +@end + + +/** + A GPUImageOutput that provides frames from a connected camera +*/ +@interface GPUImageAVCamera : GPUImageOutput +{ + NSUInteger numberOfFramesCaptured; + CGFloat totalFrameTimeDuringCapture; + + AVCaptureSession *_captureSession; + AVCaptureDevice *_inputCamera; + AVCaptureDevice *_microphone; + AVCaptureDeviceInput *videoInput; + AVCaptureVideoDataOutput *videoOutput; + + BOOL capturePaused; + GPUImageRotationMode outputRotation; + dispatch_semaphore_t frameRenderingSemaphore; + + BOOL captureAsYUV; + GLuint luminanceTexture, chrominanceTexture; + + __unsafe_unretained id _delegate; +} + +/// The AVCaptureSession used to capture from the camera +@property(readonly, retain, nonatomic) AVCaptureSession *captureSession; + +/// This enables the capture session preset to be changed on the fly +@property (readwrite, nonatomic, copy) NSString *captureSessionPreset; + +/// This sets the frame rate of the camera +/** + Setting this to 0 or below will set the frame rate back to the default setting for a particular preset. + */ +@property (readwrite) NSInteger frameRate; + +/// Easy way to tell if a front-facing camera is present on the device +@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent; + +/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console +@property(readwrite, nonatomic) BOOL runBenchmark; + +/// Use this property to manage camera settings. Focus point, exposure point, etc. +@property(readonly) AVCaptureDevice *inputCamera; + +/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
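+/// Note that this Mac port does not yet act on these flags: updateOrientationSendToTargets currently passes kGPUImageNoRotation to every target.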
+@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera; + +@property(nonatomic, assign) id delegate; + +/// @name Initialization and teardown + ++ (NSArray *)connectedCameraDevices; + +/** Begin a capture session + + See AVCaptureSession for acceptable sessionPreset values + + @param sessionPreset Session preset to use + @param deviceUniqueID Unique ID of the AVCaptureDevice to capture from + @param cameraDevice The AVCaptureDevice to capture from + */ +- (id)initWithDeviceUniqueID:(NSString *)deviceUniqueID; +- (id)initWithSessionPreset:(NSString *)sessionPreset deviceUniqueID:(NSString *)deviceUniqueID; +- (id)initWithSessionPreset:(NSString *)sessionPreset cameraDevice:(AVCaptureDevice *)cameraDevice; + +/** Tear down the capture session + */ +- (void)removeInputsAndOutputs; + +/// @name Manage the camera video stream + +/** Start camera capturing + */ +- (void)startCameraCapture; + +/** Stop camera capturing + */ +- (void)stopCameraCapture; + +/** Pause camera capturing + */ +- (void)pauseCameraCapture; + +/** Resume camera capturing + */ +- (void)resumeCameraCapture; + +/** Process a video sample + @param sampleBuffer Buffer to process + */ +- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +/** Process an audio sample + @param sampleBuffer Buffer to process + */ +- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; + +/** Get the position (front, rear) of the source camera + */ +- (AVCaptureDevicePosition)cameraPosition; + +/** Get the AVCaptureConnection of the source camera + */ +- (AVCaptureConnection *)videoCaptureConnection; + +/** This flips between the front and rear cameras + */ +- (void)rotateCamera; + +/// @name Benchmarking + +/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display + */ +- (CGFloat)averageFrameDurationDuringCapture; + +- (void)printSupportedPixelFormats; + +@end diff --git a/GPUImage/Source/Mac/GPUImageAVCamera.m b/GPUImage/Source/Mac/GPUImageAVCamera.m new file mode 100644 index 0000000..eed7a91 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageAVCamera.m @@ -0,0 +1,865 @@ +#import "GPUImageAVCamera.h" +#import "GPUImageMovieWriter.h" +#import "GPUImageFilter.h" + +NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D luminanceTexture; + uniform sampler2D chrominanceTexture; + + void main() + { + vec3 yuv; + vec3 rgb; + + yuv.x = texture2D(luminanceTexture, textureCoordinate).r; + yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5); + + // BT.601, which is the standard for SDTV, is provided as a reference + /* + rgb = mat3( 1, 1, 1, + 0, -.39465, 2.03211, + 1.13983, -.58060, 0) * yuv; + */ + + // Using BT.709, which is the standard for HDTV + rgb = mat3( 1, 1, 1, + 0, -.21482, 2.12798, + 1.28033, -.38059, 0) * yuv; + + gl_FragColor = vec4(rgb, 1); + } +); + +NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D luminanceTexture; + uniform sampler2D chrominanceTexture; + + void main() + { + vec3 yuv; + vec3 rgb; + + yuv.x = texture2D(luminanceTexture, textureCoordinate).r; + yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5); + + // BT.601, which is the standard for SDTV, is provided as a reference + /* + rgb = mat3( 1, 1, 1, + 0, -.39465, 2.03211, + 1.13983, -.58060, 0) * yuv; + */ + + // Using BT.709, which is the standard for HDTV + rgb = mat3( 1, 1,
1, + 0, -.21482, 2.12798, + 1.28033, -.38059, 0) * yuv; + + gl_FragColor = vec4(rgb, 1); + } + ); + + +#pragma mark - +#pragma mark Private methods and instance variables + +@interface GPUImageAVCamera () +{ + AVCaptureDeviceInput *audioInput; + AVCaptureAudioDataOutput *audioOutput; + NSDate *startingCaptureTime; + + NSInteger _frameRate; + + dispatch_queue_t cameraProcessingQueue, audioProcessingQueue; + + GLProgram *yuvConversionProgram; + GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute; + GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform; + GLuint yuvConversionFramebuffer; + + int imageBufferWidth, imageBufferHeight; +} + +- (void)updateOrientationSendToTargets; +- (void)convertYUVToRGBOutput; +- (void)setYUVConversionFBO; + +@end + +@implementation GPUImageAVCamera + +@synthesize captureSessionPreset = _captureSessionPreset; +@synthesize captureSession = _captureSession; +@synthesize inputCamera = _inputCamera; +@synthesize runBenchmark = _runBenchmark; +@synthesize delegate = _delegate; +@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera; + +#pragma mark - +#pragma mark Initialization and teardown + ++ (NSArray *)connectedCameraDevices; +{ + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + return devices; +} + +- (id)init; +{ + if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraDevice:nil])) + { + return nil; + } + + return self; +} + +- (id)initWithDeviceUniqueID:(NSString *)deviceUniqueID; +{ + if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 deviceUniqueID:deviceUniqueID])) + { + return nil; + } + + return self; +} + +- (id)initWithSessionPreset:(NSString *)sessionPreset deviceUniqueID:(NSString *)deviceUniqueID; +{ + if (!(self = [self initWithSessionPreset:sessionPreset cameraDevice:[AVCaptureDevice deviceWithUniqueID:deviceUniqueID]])) + { + return nil; + } + + return self; +} + +- (id)initWithSessionPreset:(NSString *)sessionPreset cameraDevice:(AVCaptureDevice *)cameraDevice; +{ + if (!(self = [super init])) + { + return nil; + } + + cameraProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", NULL); + audioProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioProcessingQueue", NULL); + frameRenderingSemaphore = dispatch_semaphore_create(1); + + _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above + _runBenchmark = NO; + capturePaused = NO; + outputRotation = kGPUImageNoRotation; + captureAsYUV = YES; + + runSynchronouslyOnVideoProcessingQueue(^{ + + if (captureAsYUV) + { + [GPUImageContext useImageProcessingContext]; +// if ([GPUImageContext deviceSupportsRedTextures]) +// { +// yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString]; +// } +// else +// { + yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString]; +// } + + if (!yuvConversionProgram.initialized) + { + [yuvConversionProgram addAttribute:@"position"]; + [yuvConversionProgram addAttribute:@"inputTextureCoordinate"]; + + if (![yuvConversionProgram link]) + { + NSString 
*progLog = [yuvConversionProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [yuvConversionProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [yuvConversionProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + yuvConversionProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"]; + yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"]; + yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"]; + yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"]; + + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + + glEnableVertexAttribArray(yuvConversionPositionAttribute); + glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute); + } + + [self initializeOutputTextureIfNeeded]; + }); + + // Grab the back-facing or front-facing camera + _inputCamera = nil; + + if (cameraDevice == nil) + { + _inputCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + else + { + _inputCamera = cameraDevice; + } + + if (!_inputCamera) { + return nil; + } + + // Create the capture session + _captureSession = [[AVCaptureSession alloc] init]; + + [_captureSession beginConfiguration]; + + // Add the video input + NSError *error = nil; + videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error]; + if ([_captureSession canAddInput:videoInput]) + { + [_captureSession addInput:videoInput]; + } + + // Add the video frame output + videoOutput = [[AVCaptureVideoDataOutput alloc] init]; + [videoOutput setAlwaysDiscardsLateVideoFrames:NO]; + +// NSLog(@"Camera: %@", _inputCamera); +// [self printSupportedPixelFormats]; + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + if (captureAsYUV && [GPUImageContext supportsFastTextureUpload]) + { + BOOL supportsFullYUVRange = NO; + NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes; + for (NSNumber *currentPixelFormat in supportedPixelFormats) + { + if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) + { + supportsFullYUVRange = YES; + } + } + + if (supportsFullYUVRange) + { + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + else + { + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + } + else + { + // Despite returning a longer list of supported pixel formats, only RGB, RGBA, BGRA, and the YUV 4:2:2 variants seem to return cleanly +// [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_422YpCbCr8_yuvs] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; + } + + [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue]; +// [videoOutput setSampleBufferDelegate:self queue:[GPUImageContext sharedContextQueue]]; + if ([_captureSession canAddOutput:videoOutput]) + { + [_captureSession 
addOutput:videoOutput]; + } + else + { + NSLog(@"Couldn't add video output"); + return nil; + } + + _captureSessionPreset = sessionPreset; + [_captureSession setSessionPreset:_captureSessionPreset]; + +// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset +// AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo]; +// +// if (conn.supportsVideoMinFrameDuration) +// conn.videoMinFrameDuration = CMTimeMake(1,60); +// if (conn.supportsVideoMaxFrameDuration) +// conn.videoMaxFrameDuration = CMTimeMake(1,60); + + [_captureSession commitConfiguration]; + + return self; +} + +- (void)dealloc +{ + [self stopCameraCapture]; + [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()]; + [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()]; + + [self removeInputsAndOutputs]; + +// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required. +#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) ) + if (cameraProcessingQueue != NULL) + { + dispatch_release(cameraProcessingQueue); + } + + if (audioProcessingQueue != NULL) + { + dispatch_release(audioProcessingQueue); + } + + if (frameRenderingSemaphore != NULL) + { + dispatch_release(frameRenderingSemaphore); + } +#endif + +// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) + if (captureAsYUV && [GPUImageContext supportsFastTextureUpload]) + { + [self destroyYUVConversionFBO]; + } +} + +- (void)removeInputsAndOutputs; +{ + [_captureSession removeInput:videoInput]; + [_captureSession removeOutput:videoOutput]; + if (_microphone != nil) + { + [_captureSession removeInput:audioInput]; + [_captureSession removeOutput:audioOutput]; + } +} + +#pragma mark - +#pragma mark Managing targets + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [super addTarget:newTarget atTextureLocation:textureLocation]; + + [newTarget setInputRotation:outputRotation atIndex:textureLocation]; +} + +#pragma mark - +#pragma mark Manage the camera video stream + +- (void)startCameraCapture; +{ + if (![_captureSession isRunning]) + { + startingCaptureTime = [NSDate date]; + [_captureSession startRunning]; + }; +} + +- (void)stopCameraCapture; +{ + if ([_captureSession isRunning]) + { + [_captureSession stopRunning]; + } +} + +- (void)pauseCameraCapture; +{ + capturePaused = YES; +} + +- (void)resumeCameraCapture; +{ + capturePaused = NO; +} + +- (void)rotateCamera +{ + if (self.frontFacingCameraPresent == NO) + return; + + NSError *error; + AVCaptureDeviceInput *newVideoInput; + AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position]; + + if (currentCameraPosition == AVCaptureDevicePositionBack) + { + currentCameraPosition = AVCaptureDevicePositionFront; + } + else + { + currentCameraPosition = AVCaptureDevicePositionBack; + } + + AVCaptureDevice *backFacingCamera = nil; + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + for (AVCaptureDevice *device in devices) + { + if ([device position] == currentCameraPosition) + { + backFacingCamera = device; + } + } + newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error]; + + if (newVideoInput != nil) + { + [_captureSession beginConfiguration]; + + [_captureSession removeInput:videoInput]; + if ([_captureSession canAddInput:newVideoInput]) + { + [_captureSession 
addInput:newVideoInput]; + videoInput = newVideoInput; + } + else + { + [_captureSession addInput:videoInput]; + } + //captureSession.sessionPreset = oriPreset; + [_captureSession commitConfiguration]; + } + + _inputCamera = backFacingCamera; +} + +- (AVCaptureDevicePosition)cameraPosition +{ + return [[videoInput device] position]; +} + +- (BOOL)isFrontFacingCameraPresent; +{ + NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + + for (AVCaptureDevice *device in devices) + { + if ([device position] == AVCaptureDevicePositionFront) + return YES; + } + + return NO; +} + +- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset; +{ + [_captureSession beginConfiguration]; + + _captureSessionPreset = captureSessionPreset; + [_captureSession setSessionPreset:_captureSessionPreset]; + + [_captureSession commitConfiguration]; +} + +- (void)setFrameRate:(NSInteger)frameRate; +{ + _frameRate = frameRate; + + if (_frameRate > 0) + { + for (AVCaptureConnection *connection in videoOutput.connections) + { + if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)]) + connection.videoMinFrameDuration = CMTimeMake(1, _frameRate); + + } + } + else + { + for (AVCaptureConnection *connection in videoOutput.connections) + { + if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)]) + connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default + } + } +} + +- (NSInteger)frameRate; +{ + return _frameRate; +} + +- (AVCaptureConnection *)videoCaptureConnection { + for (AVCaptureConnection *connection in [videoOutput connections] ) { + for ( AVCaptureInputPort *port in [connection inputPorts] ) { + if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) { + return connection; + } + } + } + + return nil; +} + +#define INITIALFRAMESTOIGNOREFORBENCHMARK 5 + +- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime; +{ + for (id currentTarget in targets) + { + if ([currentTarget enabled]) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + if (currentTarget != self.targetToIgnoreForUpdates) + { + [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget]; + [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget]; + + if ([currentTarget wantsMonochromeInput] && captureAsYUV) + { + [currentTarget setCurrentlyReceivingMonochromeInput:YES]; + [currentTarget setInputTexture:luminanceTexture atIndex:textureIndexOfTarget]; + } + else + { + [currentTarget setCurrentlyReceivingMonochromeInput:NO]; + [currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget]; + } + + [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget]; + } + else + { + [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget]; + [currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget]; + } + } + } +} + +- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; +{ + if (capturePaused) + { + return; + } + + CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); + CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer); + GLsizei bufferWidth = (GLsizei)CVPixelBufferGetWidth(cameraFrame); + GLsizei bufferHeight = (GLsizei)CVPixelBufferGetHeight(cameraFrame); + + CMTime currentTime = 
CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + + [GPUImageContext useImageProcessingContext]; + + CVPixelBufferLockBaseAddress(cameraFrame, 0); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + + // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); + + // Using the Apple YCbCr 4:2:2 extension to pull in video frame data directly +// glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bytesPerRow / 3, bufferHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, CVPixelBufferGetBaseAddress(cameraFrame)); +// glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); + + for (id currentTarget in targets) + { + if ([currentTarget enabled]) + { + if (currentTarget != self.targetToIgnoreForUpdates) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget]; + } + } + } + + CVPixelBufferUnlockBaseAddress(cameraFrame, 0); + + if (_runBenchmark) + { + numberOfFramesCaptured++; + if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK) + { + CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + totalFrameTimeDuringCapture += currentFrameTime; + NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]); + NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime); + } + } +} + +- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; +{ + [self.audioEncodingTarget processAudioBuffer:sampleBuffer]; +} + +- (void)convertYUVToRGBOutput; +{ + [GPUImageContext setActiveShaderProgram:yuvConversionProgram]; + [self setYUVConversionFBO]; + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat textureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, luminanceTexture); + glUniform1i(yuvConversionLuminanceTextureUniform, 4); + + glActiveTexture(GL_TEXTURE5); + glBindTexture(GL_TEXTURE_2D, chrominanceTexture); + glUniform1i(yuvConversionChrominanceTextureUniform, 5); + + glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); +} + +- (void)setYUVConversionFBO; +{ + if (!yuvConversionFramebuffer) + { + [self createYUVConversionFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glViewport(0, 0, imageBufferWidth, imageBufferHeight); +} + +- (void)createYUVConversionFBO; +{ + [self initializeOutputTextureIfNeeded]; + + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &yuvConversionFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, yuvConversionFramebuffer); + + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageBufferWidth, imageBufferHeight, 0,
GL_RGBA, GL_UNSIGNED_BYTE, 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + [self notifyTargetsAboutNewOutputTexture]; + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); + glBindTexture(GL_TEXTURE_2D, 0); + +} + +- (void)destroyYUVConversionFBO; +{ + if (yuvConversionFramebuffer) + { + glDeleteFramebuffers(1, &yuvConversionFramebuffer); + yuvConversionFramebuffer = 0; + } + + if (outputTexture) + { + glDeleteTextures(1, &outputTexture); + outputTexture = 0; + } +} + + +#pragma mark - +#pragma mark Benchmarking + +- (CGFloat)averageFrameDurationDuringCapture; +{ + return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0; +} + +#pragma mark - +#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate + +- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection +{ + if (captureOutput == audioOutput) + { +// if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) +// { +// return; +// } + + CFRetain(sampleBuffer); + runAsynchronouslyOnVideoProcessingQueue(^{ + [self processAudioSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); +// dispatch_semaphore_signal(frameRenderingSemaphore); + }); + } + else + { + if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) + { + return; + } + + CFRetain(sampleBuffer); + runAsynchronouslyOnVideoProcessingQueue(^{ + //Feature Detection Hook. + if (self.delegate && [self.delegate respondsToSelector:@selector(willOutputSampleBuffer:)]) + { + [self.delegate willOutputSampleBuffer:sampleBuffer]; + } + + [self processVideoSampleBuffer:sampleBuffer]; + + CFRelease(sampleBuffer); + dispatch_semaphore_signal(frameRenderingSemaphore); + }); + } +} + +#pragma mark - +#pragma mark Accessors + +- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [_captureSession beginConfiguration]; + + if (newValue == nil) + { + if (audioOutput) + { + [_captureSession removeInput:audioInput]; + [_captureSession removeOutput:audioOutput]; + audioInput = nil; + audioOutput = nil; + _microphone = nil; + } + } + else + { + _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; + audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil]; + if ([_captureSession canAddInput:audioInput]) + { + [_captureSession addInput:audioInput]; + } + audioOutput = [[AVCaptureAudioDataOutput alloc] init]; + + if ([_captureSession canAddOutput:audioOutput]) + { + [_captureSession addOutput:audioOutput]; + } + else + { + NSLog(@"Couldn't add audio output"); + } + [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue]; + } + + [_captureSession commitConfiguration]; + + [super setAudioEncodingTarget:newValue]; + }); +} + +- (void)updateOrientationSendToTargets; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + + // From the iOS 5.0 release notes: + // In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight. 
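+        // This Mac port makes no such orientation distinction; every target below simply receives kGPUImageNoRotation.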
+ + outputRotation = kGPUImageNoRotation; + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]]; + } + }); +} + +- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue +{ + _horizontallyMirrorFrontFacingCamera = newValue; + [self updateOrientationSendToTargets]; +} + +- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue +{ + _horizontallyMirrorRearFacingCamera = newValue; + [self updateOrientationSendToTargets]; +} + +- (void)printSupportedPixelFormats; +{ + NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes; + for (NSNumber *currentPixelFormat in supportedPixelFormats) + { + NSString *pixelFormatName = nil; + + switch([currentPixelFormat intValue]) + { + case kCVPixelFormatType_1Monochrome: pixelFormatName = @"kCVPixelFormatType_1Monochrome"; break; + case kCVPixelFormatType_2Indexed: pixelFormatName = @"kCVPixelFormatType_2Indexed"; break; + case kCVPixelFormatType_4Indexed: pixelFormatName = @"kCVPixelFormatType_4Indexed"; break; + case kCVPixelFormatType_8Indexed: pixelFormatName = @"kCVPixelFormatType_8Indexed"; break; + case kCVPixelFormatType_1IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_1IndexedGray_WhiteIsZero"; break; + case kCVPixelFormatType_2IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_2IndexedGray_WhiteIsZero"; break; + case kCVPixelFormatType_4IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_4IndexedGray_WhiteIsZero"; break; + case kCVPixelFormatType_8IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_8IndexedGray_WhiteIsZero"; break; + case kCVPixelFormatType_16BE555: pixelFormatName = @"kCVPixelFormatType_16BE555"; break; + case kCVPixelFormatType_16LE555: pixelFormatName = @"kCVPixelFormatType_16LE555"; break; + case kCVPixelFormatType_16LE5551: pixelFormatName = @"kCVPixelFormatType_16LE5551"; break; + case kCVPixelFormatType_16BE565: pixelFormatName = @"kCVPixelFormatType_16BE565"; break; + case kCVPixelFormatType_16LE565: pixelFormatName = @"kCVPixelFormatType_16LE565"; break; + case kCVPixelFormatType_24RGB: pixelFormatName = @"kCVPixelFormatType_24RGB"; break; + case kCVPixelFormatType_24BGR: pixelFormatName = @"kCVPixelFormatType_24BGR"; break; + case kCVPixelFormatType_32ARGB: pixelFormatName = @"kCVPixelFormatType_32ARGB"; break; + case kCVPixelFormatType_32BGRA: pixelFormatName = @"kCVPixelFormatType_32BGRA"; break; + case kCVPixelFormatType_32ABGR: pixelFormatName = @"kCVPixelFormatType_32ABGR"; break; + case kCVPixelFormatType_32RGBA: pixelFormatName = @"kCVPixelFormatType_32RGBA"; break; + case kCVPixelFormatType_64ARGB: pixelFormatName = @"kCVPixelFormatType_64ARGB"; break; + case kCVPixelFormatType_48RGB: pixelFormatName = @"kCVPixelFormatType_48RGB"; break; + case kCVPixelFormatType_32AlphaGray: pixelFormatName = @"kCVPixelFormatType_32AlphaGray"; break; + case kCVPixelFormatType_16Gray: pixelFormatName = @"kCVPixelFormatType_16Gray"; break; + case kCVPixelFormatType_30RGB: pixelFormatName = @"kCVPixelFormatType_30RGB"; break; + case kCVPixelFormatType_422YpCbCr8: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8"; break; + case kCVPixelFormatType_4444YpCbCrA8: pixelFormatName = @"kCVPixelFormatType_4444YpCbCrA8"; break; + case kCVPixelFormatType_4444YpCbCrA8R: pixelFormatName = @"kCVPixelFormatType_4444YpCbCrA8R"; break; + case kCVPixelFormatType_4444AYpCbCr8: pixelFormatName 
= @"kCVPixelFormatType_4444AYpCbCr8"; break; + case kCVPixelFormatType_4444AYpCbCr16: pixelFormatName = @"kCVPixelFormatType_4444AYpCbCr16"; break; + case kCVPixelFormatType_444YpCbCr8: pixelFormatName = @"kCVPixelFormatType_444YpCbCr8"; break; + case kCVPixelFormatType_422YpCbCr16: pixelFormatName = @"kCVPixelFormatType_422YpCbCr16"; break; + case kCVPixelFormatType_422YpCbCr10: pixelFormatName = @"kCVPixelFormatType_422YpCbCr10"; break; + case kCVPixelFormatType_444YpCbCr10: pixelFormatName = @"kCVPixelFormatType_444YpCbCr10"; break; + case kCVPixelFormatType_420YpCbCr8Planar: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8Planar"; break; + case kCVPixelFormatType_420YpCbCr8PlanarFullRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8PlanarFullRange"; break; + case kCVPixelFormatType_422YpCbCr_4A_8BiPlanar: pixelFormatName = @"kCVPixelFormatType_422YpCbCr_4A_8BiPlanar"; break; + case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange"; break; + case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8BiPlanarFullRange"; break; + case kCVPixelFormatType_422YpCbCr8_yuvs: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8_yuvs"; break; + case kCVPixelFormatType_422YpCbCr8FullRange: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8FullRange"; break; + case kCVPixelFormatType_OneComponent8: pixelFormatName = @"kCVPixelFormatType_OneComponent8"; break; + case kCVPixelFormatType_TwoComponent8: pixelFormatName = @"kCVPixelFormatType_TwoComponent8"; break; + } + NSLog(@"Supported pixel format: %@", pixelFormatName); + } +} + +@end diff --git a/GPUImage/Source/Mac/GPUImageContext.h b/GPUImage/Source/Mac/GPUImageContext.h new file mode 100755 index 0000000..8d54ca2 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageContext.h @@ -0,0 +1,61 @@ +#import +#import +#import +#import +#import +#import "GLProgram.h" + +#define GPUImageRotationSwapsWidthAndHeight(rotation) (((rotation) == kGPUImageRotateLeft) || ((rotation) == kGPUImageRotateRight) || ((rotation) == kGPUImageRotateRightFlipVertical) ) + +typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode; + +@interface GPUImageContext : NSObject + +@property(readonly, nonatomic) dispatch_queue_t contextQueue; +@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram; +@property(readonly, retain, nonatomic) NSOpenGLContext *context; + ++ (void *)contextKey; ++ (GPUImageContext *)sharedImageProcessingContext; ++ (dispatch_queue_t)sharedContextQueue; ++ (void)useImageProcessingContext; ++ (void)setActiveShaderProgram:(GLProgram *)shaderProgram; ++ (GLint)maximumTextureSizeForThisDevice; ++ (GLint)maximumTextureUnitsForThisDevice; ++ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension; ++ (BOOL)deviceSupportsRedTextures; ++ (BOOL)deviceSupportsFramebufferReads; ++ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize; + +- (void)presentBufferForDisplay; +- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString; + +- (void)useSharegroup:(CGLShareGroupObj *)sharegroup; + +// Manage fast texture upload ++ (BOOL)supportsFastTextureUpload; + +@end + +@protocol GPUImageTextureDelegate; + +@protocol GPUImageInput +- (void)newFrameReadyAtTime:(CMTime)frameTime 
atIndex:(NSInteger)textureIndex; +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +- (NSInteger)nextAvailableTextureIndex; +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +- (CGSize)maximumOutputSize; +- (void)endProcessing; +- (BOOL)shouldIgnoreUpdatesToThisTarget; +- (BOOL)enabled; +- (void)conserveMemoryForNextFrame; +- (BOOL)wantsMonochromeInput; +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +@end + +@protocol GPUImageTextureDelegate +- (void)textureNoLongerNeededForTarget:(id)textureTarget; +@end + diff --git a/GPUImage/Source/Mac/GPUImageContext.m b/GPUImage/Source/Mac/GPUImageContext.m new file mode 100755 index 0000000..31fef7b --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageContext.m @@ -0,0 +1,233 @@ +#import "GPUImageContext.h" +#import + +@interface GPUImageContext() +{ + NSMutableDictionary *shaderProgramCache; + CGLShareGroupObj *_sharegroup; +} + +@end + +@implementation GPUImageContext + +@synthesize context = _context; +@synthesize currentShaderProgram = _currentShaderProgram; +@synthesize contextQueue = _contextQueue; + +static void *openGLESContextQueueKey; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + + openGLESContextQueueKey = &openGLESContextQueueKey; + _contextQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.openGLESContextQueue", NULL); + dispatch_queue_set_specific(_contextQueue, openGLESContextQueueKey, (__bridge void *)self, NULL); + shaderProgramCache = [[NSMutableDictionary alloc] init]; + + return self; +} + ++ (void *)contextKey { + return openGLESContextQueueKey; +} + +// Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html ++ (GPUImageContext *)sharedImageProcessingContext; +{ + static dispatch_once_t pred; + static GPUImageContext *sharedImageProcessingContext = nil; + + dispatch_once(&pred, ^{ + sharedImageProcessingContext = [[[self class] alloc] init]; + }); + return sharedImageProcessingContext; +} + ++ (dispatch_queue_t)sharedContextQueue; +{ + return [[self sharedImageProcessingContext] contextQueue]; +} + ++ (void)useImageProcessingContext; +{ + NSOpenGLContext *imageProcessingContext = [[GPUImageContext sharedImageProcessingContext] context]; + if ([NSOpenGLContext currentContext] != imageProcessingContext) + { + [imageProcessingContext makeCurrentContext]; + } +} + ++ (void)setActiveShaderProgram:(GLProgram *)shaderProgram; +{ + GPUImageContext *sharedContext = [GPUImageContext sharedImageProcessingContext]; + NSOpenGLContext *imageProcessingContext = [sharedContext context]; + if ([NSOpenGLContext currentContext] != imageProcessingContext) + { + [imageProcessingContext makeCurrentContext]; + } + + if (sharedContext.currentShaderProgram != shaderProgram) + { + sharedContext.currentShaderProgram = shaderProgram; + [shaderProgram use]; + } +} + ++ (GLint)maximumTextureSizeForThisDevice; +{ + static dispatch_once_t pred; + static GLint maxTextureSize = 0; + + dispatch_once(&pred, ^{ + [self useImageProcessingContext]; + glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize); + }); + + return maxTextureSize; +} + ++ (GLint)maximumTextureUnitsForThisDevice; +{ + GLint maxTextureUnits; + glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits); + return maxTextureUnits; +} + ++ 
(BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension; +{ + static dispatch_once_t pred; + static NSArray *extensionNames = nil; + + // Cache extensions for later quick reference, since this won't change for a given device + dispatch_once(&pred, ^{ + [GPUImageContext useImageProcessingContext]; + NSString *extensionsString = [NSString stringWithCString:(const char *)glGetString(GL_EXTENSIONS) encoding:NSASCIIStringEncoding]; + extensionNames = [extensionsString componentsSeparatedByString:@" "]; + }); + + return [extensionNames containsObject:extension]; +} + ++ (BOOL)deviceSupportsFramebufferReads; +{ + return NO; +} + +// http://www.khronos.org/registry/gles/extensions/EXT/EXT_texture_rg.txt + ++ (BOOL)deviceSupportsRedTextures; +{ + static dispatch_once_t pred; + static BOOL supportsRedTextures = NO; + + dispatch_once(&pred, ^{ + supportsRedTextures = [GPUImageContext deviceSupportsOpenGLESExtension:@"GL_EXT_texture_rg"]; + }); + + return supportsRedTextures; +} + + ++ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize; +{ + GLint maxTextureSize = [self maximumTextureSizeForThisDevice]; + if ( (inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize) ) + { + return inputSize; + } + + CGSize adjustedSize; + if (inputSize.width > inputSize.height) + { + adjustedSize.width = (CGFloat)maxTextureSize; + adjustedSize.height = ((CGFloat)maxTextureSize / inputSize.width) * inputSize.height; + } + else + { + adjustedSize.height = (CGFloat)maxTextureSize; + adjustedSize.width = ((CGFloat)maxTextureSize / inputSize.height) * inputSize.width; + } + + return adjustedSize; +} + +- (void)presentBufferForDisplay; +{ + [self.context flushBuffer]; +} + +- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString; +{ + NSString *lookupKeyForShaderProgram = [NSString stringWithFormat:@"V: %@ - F: %@", vertexShaderString, fragmentShaderString]; + GLProgram *programFromCache = [shaderProgramCache objectForKey:lookupKeyForShaderProgram]; + + if (programFromCache == nil) + { + programFromCache = [[GLProgram alloc] initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]; + [shaderProgramCache setObject:programFromCache forKey:lookupKeyForShaderProgram]; + } + + return programFromCache; +} + +- (void)useSharegroup:(CGLShareGroupObj *)sharegroup; +{ + NSAssert(_context == nil, @"Unable to use a share group when the context has already been created. Call this method before you use the context for the first time."); + + _sharegroup = sharegroup; +} + +- (NSOpenGLContext *)createContext; +{ + NSOpenGLPixelFormatAttribute pixelFormatAttributes[] = { + NSOpenGLPFADoubleBuffer, + NSOpenGLPFAAccelerated, 0, + 0 + }; + + NSOpenGLPixelFormat *pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:pixelFormatAttributes]; + if (pixelFormat == nil) + { + NSLog(@"Error: No appropriate pixel format found"); + } + // TODO: Take into account the sharegroup + NSOpenGLContext *context = [[NSOpenGLContext alloc] initWithFormat:pixelFormat shareContext:nil]; + + NSAssert(context != nil, @"Unable to create an OpenGL context. 
The GPUImage framework requires OpenGL support to work."); + return context; +} + + +#pragma mark - +#pragma mark Manage fast texture upload + ++ (BOOL)supportsFastTextureUpload; +{ + // This may need to be redone to account for the Mac's accelerated data transfer methods + return NO; +} + +#pragma mark - +#pragma mark Accessors + +- (NSOpenGLContext *)context; +{ + if (_context == nil) + { + _context = [self createContext]; + [_context makeCurrentContext]; + + // Set up a few global settings for the image processing pipeline + glDisable(GL_DEPTH_TEST); + glEnable(GL_TEXTURE_2D); + } + + return _context; +} + +@end diff --git a/GPUImage/Source/Mac/GPUImageMac-Info.plist b/GPUImage/Source/Mac/GPUImageMac-Info.plist new file mode 100644 index 0000000..88daf2c --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageMac-Info.plist @@ -0,0 +1,30 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> +<plist version="1.0"> +<dict> + <key>CFBundleDevelopmentRegion</key> + <string>English</string> + <key>CFBundleExecutable</key> + <string>${EXECUTABLE_NAME}</string> + <key>CFBundleIconFile</key> + <string></string> + <key>CFBundleIdentifier</key> + <string>com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}</string> + <key>CFBundleInfoDictionaryVersion</key> + <string>6.0</string> + <key>CFBundleName</key> + <string>${PRODUCT_NAME}</string> + <key>CFBundlePackageType</key> + <string>FMWK</string> + <key>CFBundleShortVersionString</key> + <string>1.0</string> + <key>CFBundleSignature</key> + <string>????</string> + <key>CFBundleVersion</key> + <string>1</string> + <key>NSHumanReadableCopyright</key> + <string>Copyright © 2013 Sunset Lake Software LLC. All rights reserved.</string> + <key>NSPrincipalClass</key> + <string></string> +</dict> +</plist> diff --git a/GPUImage/Source/Mac/GPUImageMac-Prefix.pch b/GPUImage/Source/Mac/GPUImageMac-Prefix.pch new file mode 100644 index 0000000..f01e581 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageMac-Prefix.pch @@ -0,0 +1,7 @@ +// +// Prefix header for all source files of the 'GPUImageMac' target in the 'GPUImageMac' project +// + +#ifdef __OBJC__ + #import <Cocoa/Cocoa.h> +#endif diff --git a/GPUImage/Source/Mac/GPUImageMovieWriter.h b/GPUImage/Source/Mac/GPUImageMovieWriter.h new file mode 100755 index 0000000..f9edc87 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageMovieWriter.h @@ -0,0 +1,59 @@ +#import <Foundation/Foundation.h> +#import <AVFoundation/AVFoundation.h> +#import "GPUImageContext.h" + +extern NSString *const kGPUImageColorSwizzlingFragmentShaderString; + +@protocol GPUImageMovieWriterDelegate <NSObject> + +@optional +- (void)movieRecordingCompleted; +- (void)movieRecordingFailedWithError:(NSError*)error; + +@end + +@interface GPUImageMovieWriter : NSObject <GPUImageInput> +{ + CMVideoDimensions videoDimensions; + CMVideoCodecType videoType; + + NSURL *movieURL; + NSString *fileType; + AVAssetWriter *assetWriter; + AVAssetWriterInput *assetWriterAudioInput; + AVAssetWriterInput *assetWriterVideoInput; + AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput; + dispatch_queue_t movieWritingQueue; + + CGSize videoSize; + GPUImageRotationMode inputRotation; + + __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate; +} + +@property(readwrite, nonatomic) BOOL hasAudioTrack; +@property(readwrite, nonatomic) BOOL shouldPassthroughAudio; +@property(nonatomic, copy) void(^completionBlock)(void); +@property(nonatomic, copy) void(^failureBlock)(NSError*); +@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate; +@property(readwrite, nonatomic) BOOL encodingLiveVideo; +@property(nonatomic, copy) void(^videoInputReadyCallback)(void); +@property(nonatomic, copy) void(^audioInputReadyCallback)(void); +@property(nonatomic) BOOL enabled; + +// Initialization and teardown +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize; +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings; + +- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings; + +// Movie
recording +- (void)startRecording; +- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform; +- (void)finishRecording; +- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler; +- (void)cancelRecording; +- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer; +- (void)enableSynchronizationCallbacks; + +@end diff --git a/GPUImage/Source/Mac/GPUImageMovieWriter.m b/GPUImage/Source/Mac/GPUImageMovieWriter.m new file mode 100755 index 0000000..e27e32e --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageMovieWriter.m @@ -0,0 +1,641 @@ +#import "GPUImageMovieWriter.h" + +#import "GPUImageContext.h" +#import "GLProgram.h" +#import "GPUImageFilter.h" + +#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra; + } + ); +#else +NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING +( + varying vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra; + } + ); +#endif + + +@interface GPUImageMovieWriter () +{ + GLuint movieFramebuffer, movieRenderbuffer; + + GLProgram *colorSwizzlingProgram; + GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute; + GLint colorSwizzlingInputTextureUniform; + + GLuint inputTextureForMovieRendering; + + GLubyte *frameData; + + CMTime startTime, previousFrameTime; + + BOOL isRecording; +} + +// Movie recording +- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings; + +// Frame rendering +- (void)createDataFBO; +- (void)destroyDataFBO; +- (void)setFilterFBO; + +- (void)renderAtInternalSize; + +@end + +@implementation GPUImageMovieWriter + +@synthesize hasAudioTrack = _hasAudioTrack; +@synthesize encodingLiveVideo = _encodingLiveVideo; +@synthesize shouldPassthroughAudio = _shouldPassthroughAudio; +@synthesize completionBlock; +@synthesize failureBlock; +@synthesize videoInputReadyCallback; +@synthesize audioInputReadyCallback; +@synthesize enabled; + +@synthesize delegate = _delegate; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize; +{ + return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil]; +} + +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings; +{ + if (!(self = [super init])) + { + return nil; + } + + self.enabled = YES; + + videoSize = newSize; + movieURL = newMovieURL; + fileType = newFileType; + startTime = kCMTimeInvalid; + _encodingLiveVideo = YES; + previousFrameTime = kCMTimeNegativeInfinity; + inputRotation = kGPUImageNoRotation; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if ([GPUImageContext supportsFastTextureUpload]) + { + colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + } + else + { + colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString]; + } + + if 
(!colorSwizzlingProgram.initialized) + { + [colorSwizzlingProgram addAttribute:@"position"]; + [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"]; + + if (![colorSwizzlingProgram link]) + { + NSString *progLog = [colorSwizzlingProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [colorSwizzlingProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + colorSwizzlingProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"]; + colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"]; + colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"]; + + // REFACTOR: Wrap this in a block for the image processing queue + [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram]; + + glEnableVertexAttribArray(colorSwizzlingPositionAttribute); + glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute); + }); + + [self initializeMovieWithOutputSettings:outputSettings]; + + return self; +} + +- (void)dealloc; +{ + [self destroyDataFBO]; + + if (frameData != NULL) + { + free(frameData); + } +} + +#pragma mark - +#pragma mark Movie recording + +- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings; +{ + isRecording = NO; + + self.enabled = YES; + frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4); + +// frameData = (GLubyte *) calloc(videoSize.width * videoSize.height * 4, sizeof(GLubyte)); + NSError *error = nil; + assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error]; + if (error != nil) + { + NSLog(@"Error: %@", error); + if (failureBlock) + { + failureBlock(error); + } + else + { + if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)]) + { + [self.delegate movieRecordingFailedWithError:error]; + } + } + } + + // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. 
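+ // Note: CMTimeMakeWithSeconds(1.0, 1000) below builds a CMTime of one second at a timescale of
+ // 1000 (1000/1000), so the writer closes out a movie fragment roughly once per second. A shorter
+ // interval (for example, CMTimeMakeWithSeconds(0.5, 1000)) would trade extra container overhead
+ // for less footage lost if the process dies mid-recording.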
+ assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000); + + // use default output settings if none specified + if (outputSettings == nil) + { + outputSettings = [[NSMutableDictionary alloc] init]; + [outputSettings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey]; + [outputSettings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey]; + [outputSettings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey]; + } + // custom output settings specified + else + { + NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey]; + NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey]; + NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey]; + + NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters."); + } + + /* + NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey, + [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey, + [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey, + [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey, + nil]; + + NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey, + [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey, + nil]; + + NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init]; + [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey]; + [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey]; + [compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey]; + [compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey]; + [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey]; + + [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey]; + */ + + assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings]; + assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo; + + // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
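+ // Note: the swizzle itself is the single line in kGPUImageColorSwizzlingFragmentShaderString
+ // above — gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra — so the bytes
+ // that glReadPixels() hands back already sit in the BGRA order that the
+ // kCVPixelFormatType_32BGRA buffer declared below expects.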
+ NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, + [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey, + [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey, + nil]; +// NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, +// nil]; + + assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary]; + + [assetWriter addInput:assetWriterVideoInput]; +} + +- (void)startRecording; +{ + isRecording = YES; + startTime = kCMTimeInvalid; + // [assetWriter startWriting]; + + // [assetWriter startSessionAtSourceTime:kCMTimeZero]; +} + +- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform; +{ + assetWriterVideoInput.transform = orientationTransform; + + [self startRecording]; +} + +- (void)cancelRecording; +{ + if (assetWriter.status == AVAssetWriterStatusCompleted) + { + return; + } + + isRecording = NO; + runOnMainQueueWithoutDeadlocking(^{ + [assetWriterVideoInput markAsFinished]; + [assetWriterAudioInput markAsFinished]; + [assetWriter cancelWriting]; + }); +} + +- (void)finishRecording; +{ + [self finishRecordingWithCompletionHandler:nil]; +} + +- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler; +{ + if (assetWriter.status == AVAssetWriterStatusCompleted) + { + return; + } + + isRecording = NO; + runOnMainQueueWithoutDeadlocking(^{ + [assetWriterVideoInput markAsFinished]; + [assetWriterAudioInput markAsFinished]; +#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0)) + // Not iOS 6 SDK + [assetWriter finishWriting]; + if (handler) handler(); +#else + // iOS 6 SDK + if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) { + // Running iOS 6 + [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })]; + } + else { + // Not running iOS 6 + [assetWriter finishWriting]; + if (handler) handler(); + } +#endif + }); +} + +- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer; +{ + if (!isRecording) + { + return; + } + + if (_hasAudioTrack) + { + CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer); + + if (CMTIME_IS_INVALID(startTime)) + { + if (audioInputReadyCallback == NULL) + { + [assetWriter startWriting]; + } + [assetWriter startSessionAtSourceTime:currentSampleTime]; + startTime = currentSampleTime; + } + + if (!assetWriterAudioInput.readyForMoreMediaData) + { + NSLog(@"Had to drop an audio frame"); + return; + } + +// NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch); + [assetWriterAudioInput appendSampleBuffer:audioBuffer]; + } +} + +- (void)enableSynchronizationCallbacks; +{ + if (videoInputReadyCallback != NULL) + { + [assetWriter startWriting]; + [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:videoInputReadyCallback]; + } + + if (audioInputReadyCallback != NULL) + { + [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:audioInputReadyCallback]; + } + +} + +#pragma mark - +#pragma mark Frame rendering + +- 
(void)createDataFBO; +{ + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &movieFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer); + + glGenRenderbuffers(1, &movieRenderbuffer); + glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer); + glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8, (int)videoSize.width, (int)videoSize.height); + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer); + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); +} + +- (void)destroyDataFBO; +{ + [GPUImageContext useImageProcessingContext]; + + if (movieFramebuffer) + { + glDeleteFramebuffers(1, &movieFramebuffer); + movieFramebuffer = 0; + } + + if (movieRenderbuffer) + { + glDeleteRenderbuffers(1, &movieRenderbuffer); + movieRenderbuffer = 0; + } +} + +- (void)setFilterFBO; +{ + if (!movieFramebuffer) + { + [self createDataFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer); + + glViewport(0, 0, (int)videoSize.width, (int)videoSize.height); +} + +- (void)renderAtInternalSize; +{ + [GPUImageContext useImageProcessingContext]; + [self setFilterFBO]; + + [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram]; + + glClearColor(1.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + // This needs to be flipped to write out to video correctly + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat textureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, inputTextureForMovieRendering); + glUniform1i(colorSwizzlingInputTextureUniform, 4); + + glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glFinish(); +} + +#pragma mark - +#pragma mark GPUImageInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + if (!isRecording) + { + return; + } + + // Drop frames forced by images and other things with no time constants + // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case + if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) ) + { + return; + } + + if (CMTIME_IS_INVALID(startTime)) + { + if (videoInputReadyCallback == NULL) + { + [assetWriter startWriting]; + } + + [assetWriter startSessionAtSourceTime:frameTime]; + startTime = frameTime; + } + + if (!assetWriterVideoInput.readyForMoreMediaData) + { + NSLog(@"Had to drop a video frame"); + return; + } + + // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames + [GPUImageContext useImageProcessingContext]; + [self renderAtInternalSize]; + + CVPixelBufferRef pixel_buffer = NULL; + + CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer); + if ((pixel_buffer == NULL) || (status != kCVReturnSuccess)) + { + return; + } + else + { + CVPixelBufferLockBaseAddress(pixel_buffer, 0); + + GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer); + glReadPixels(0, 0, videoSize.width, videoSize.height, 
GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData); + } + +// if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)]) + if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime]) + { + NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value); + } + else + { +// NSLog(@"Recorded video sample time: %lld, %d, %lld", frameTime.value, frameTime.timescale, frameTime.epoch); + } + CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); + + previousFrameTime = frameTime; + + if (![GPUImageContext supportsFastTextureUpload]) + { + CVPixelBufferRelease(pixel_buffer); + } +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + inputTextureForMovieRendering = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ +} + +- (CGSize)maximumOutputSize; +{ + return videoSize; +} + +- (void)endProcessing +{ + if (completionBlock) + { + completionBlock(); + } + else + { + if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)]) + { + [_delegate movieRecordingCompleted]; + } + } +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +#pragma mark - +#pragma mark Accessors + +- (void)setHasAudioTrack:(BOOL)newValue +{ + [self setHasAudioTrack:newValue audioSettings:nil]; +} + +- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings; +{ + _hasAudioTrack = newValue; + + if (_hasAudioTrack) + { + if (_shouldPassthroughAudio) + { + // Do not set any settings so audio will be the same as passthrough + audioOutputSettings = nil; + } + else if (audioOutputSettings == nil) + { +// double preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate]; + double preferredHardwareSampleRate = 48000; // ? 
- TODO: Fix this, because it's probably broken + + AudioChannelLayout acl; + bzero( &acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + + audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey, + [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, + [ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey, + [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey, + //[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey, + [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey, + nil]; +/* + AudioChannelLayout acl; + bzero( &acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + + audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey, + [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, + [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey, + [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey, + [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey, + nil];*/ + } + + assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings]; + [assetWriter addInput:assetWriterAudioInput]; + assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo; + } + else + { + // Remove audio track if it exists + } +} + + +@end diff --git a/GPUImage/Source/Mac/GPUImagePicture.h b/GPUImage/Source/Mac/GPUImagePicture.h new file mode 100755 index 0000000..fa25846 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImagePicture.h @@ -0,0 +1,23 @@ +#import +#import "GPUImageOutput.h" + +@interface GPUImagePicture : GPUImageOutput +{ + CGSize pixelSizeOfImage; + BOOL hasProcessedImage; + + dispatch_semaphore_t imageUpdateSemaphore; +} + +// Initialization and teardown +- (id)initWithURL:(NSURL *)url; +- (id)initWithImage:(NSImage *)newImageSource; +- (id)initWithCGImage:(CGImageRef)newImageSource; +- (id)initWithImage:(NSImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; + +// Image rendering +- (void)processImage; +- (CGSize)outputImageSize; + +@end diff --git a/GPUImage/Source/Mac/GPUImagePicture.m b/GPUImage/Source/Mac/GPUImagePicture.m new file mode 100755 index 0000000..64ed22c --- /dev/null +++ b/GPUImage/Source/Mac/GPUImagePicture.m @@ -0,0 +1,254 @@ +#import "GPUImagePicture.h" + +@implementation GPUImagePicture + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithURL:(NSURL *)url; +{ + NSData *imageData = [[NSData alloc] initWithContentsOfURL:url]; + + if (!(self = [self initWithData:imageData])) + { + return nil; + } + + return self; +} + +- (id)initWithData:(NSData *)imageData; +{ + NSImage *inputImage = [[NSImage alloc] initWithData:imageData]; + + if (!(self = [self initWithImage:inputImage])) + { + return nil; + } + + return self; +} + +- (id)initWithImage:(NSImage *)newImageSource; +{ + if (!(self = [self initWithImage:newImageSource smoothlyScaleOutput:NO])) + { + return nil; + } + + return self; +} + +- (id)initWithCGImage:(CGImageRef)newImageSource; +{ + if (!(self = [self initWithCGImage:newImageSource smoothlyScaleOutput:NO])) + { + return nil; + } + return self; +} + +- (id)initWithImage:(NSImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +{ + return [self initWithCGImage:[newImageSource 
CGImageForProposedRect:NULL context:NULL hints:nil] smoothlyScaleOutput:smoothlyScaleOutput]; +} + +- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +{ + if (!(self = [super init])) + { + return nil; + } + + hasProcessedImage = NO; + self.shouldSmoothlyScaleOutput = smoothlyScaleOutput; + imageUpdateSemaphore = dispatch_semaphore_create(1); + + // TODO: Dispatch this whole thing asynchronously to move image loading off main thread + CGFloat widthOfImage = CGImageGetWidth(newImageSource); + CGFloat heightOfImage = CGImageGetHeight(newImageSource); + pixelSizeOfImage = CGSizeMake(widthOfImage, heightOfImage); + CGSize pixelSizeToUseForTexture = pixelSizeOfImage; + + BOOL shouldRedrawUsingCoreGraphics = YES; + + // For now, deal with images larger than the maximum texture size by resizing to be within that limit + CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:pixelSizeOfImage]; + if (!CGSizeEqualToSize(scaledImageSizeToFitOnGPU, pixelSizeOfImage)) + { + pixelSizeOfImage = scaledImageSizeToFitOnGPU; + pixelSizeToUseForTexture = pixelSizeOfImage; + shouldRedrawUsingCoreGraphics = YES; + } + + if (self.shouldSmoothlyScaleOutput) + { + // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill + CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width)); + CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height)); + + pixelSizeToUseForTexture = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight)); + + shouldRedrawUsingCoreGraphics = YES; + } + + GLubyte *imageData = NULL; + CFDataRef dataFromImageDataProvider; + + // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); + + if (shouldRedrawUsingCoreGraphics) + { + // For resized image, redraw + imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4); + + CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB(); + + CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); + // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html + CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource); + CGContextRelease(imageContext); + CGColorSpaceRelease(genericRGBColorspace); + } + else + { + // Access the raw image bytes directly + dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource)); + imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider); + } + + // elapsedTime = (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0; + // NSLog(@"Core Graphics drawing time: %f", elapsedTime); + + // CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f; + // NSUInteger totalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height); + // + // for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++) + // { + // currentBlueTotal += (CGFloat)imageData[(currentPixel * 4)] / 255.0f; + // currentGreenTotal += (CGFloat)imageData[(currentPixel * 4) + 1] / 255.0f; 
+ // currentRedTotal += (CGFloat)imageData[(currentPixel * 4 + 2)] / 255.0f; + // currentAlphaTotal += (CGFloat)imageData[(currentPixel * 4) + 3] / 255.0f; + // } + // + // NSLog(@"Debug, average input image red: %f, green: %f, blue: %f, alpha: %f", currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels); + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + [self initializeOutputTextureIfNeeded]; + + glBindTexture(GL_TEXTURE_2D, outputTexture); + if (self.shouldSmoothlyScaleOutput) + { + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR); + } + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData); + + if (self.shouldSmoothlyScaleOutput) + { + glGenerateMipmap(GL_TEXTURE_2D); + } + }); + + if (shouldRedrawUsingCoreGraphics) + { + free(imageData); + } + else + { + CFRelease(dataFromImageDataProvider); + } + + return self; +} + +// ARC forbids explicit message send of 'release' on Mountain Lion, but needs this on Lion and older +#if ( (MAC_OS_X_VERSION_MIN_REQUIRED < __MAC_10_8) || (!defined(__MAC_10_8)) ) +- (void)dealloc; +{ + if (imageUpdateSemaphore != NULL) + { + dispatch_release(imageUpdateSemaphore); + } +} +#endif + +#pragma mark - +#pragma mark Image rendering + +- (void)initializeOutputTextureIfNeeded; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + if (!outputTexture) + { + [GPUImageContext useImageProcessingContext]; + + glActiveTexture(GL_TEXTURE0); + glGenTextures(1, &outputTexture); + glBindTexture(GL_TEXTURE_2D, outputTexture); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + // This is necessary for non-power-of-two textures + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glBindTexture(GL_TEXTURE_2D, 0); + } + }); +} + +- (void)removeAllTargets; +{ + [super removeAllTargets]; + hasProcessedImage = NO; +} + +- (void)processImage; +{ + hasProcessedImage = YES; + + // dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_FOREVER); + + if (dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_NOW) != 0) + { + NSLog(@"Bailing on the image upload semaphore"); + return; + } + + runAsynchronouslyOnVideoProcessingQueue(^{ + +// if (MAX(pixelSizeOfImage.width, pixelSizeOfImage.height) > 1000.0) +// { +// [self conserveMemoryForNextFrame]; +// } + + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureIndexOfTarget]; + } + + dispatch_semaphore_signal(imageUpdateSemaphore); + }); +} + +- (CGSize)outputImageSize; +{ + return pixelSizeOfImage; +} + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [super addTarget:newTarget atTextureLocation:textureLocation]; + + if (hasProcessedImage) + { + [newTarget setInputSize:pixelSizeOfImage atIndex:textureLocation]; + [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation]; + } +} + +@end \ No 
newline at end of file diff --git a/GPUImage/Source/Mac/GPUImageView.h b/GPUImage/Source/Mac/GPUImageView.h new file mode 100755 index 0000000..8852f31 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageView.h @@ -0,0 +1,40 @@ +#import <Cocoa/Cocoa.h> +#import "GPUImageContext.h" + +typedef enum { + kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio + kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color + kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view +} GPUImageFillModeType; + +/** + NSOpenGLView subclass to use as an endpoint for displaying GPUImage outputs + */ +@interface GPUImageView : NSOpenGLView <GPUImageInput> +{ + GPUImageRotationMode inputRotation; + __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate; +} + +/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio + */ +@property(readwrite, nonatomic) GPUImageFillModeType fillMode; + +/** This calculates the current display size, in pixels, taking into account Retina scaling factors + */ +@property(readonly, nonatomic) CGSize sizeInPixels; + +@property(nonatomic) BOOL enabled; + +/** Handling fill mode + + @param redComponent Red component for background color + @param greenComponent Green component for background color + @param blueComponent Blue component for background color + @param alphaComponent Alpha component for background color + */ +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; + +@end diff --git a/GPUImage/Source/Mac/GPUImageView.m b/GPUImage/Source/Mac/GPUImageView.m new file mode 100755 index 0000000..f48d231 --- /dev/null +++ b/GPUImage/Source/Mac/GPUImageView.m @@ -0,0 +1,458 @@ +#import "GPUImageView.h" +#import <OpenGL/gl.h> +#import "GPUImageContext.h" +#import "GPUImageFilter.h" +#import <AVFoundation/AVFoundation.h> + +#pragma mark - +#pragma mark Private methods and instance variables + +@interface GPUImageView () +{ + GLuint inputTextureForDisplay; + + GLProgram *displayProgram; + GLint displayPositionAttribute, displayTextureCoordinateAttribute; + GLint displayInputTextureUniform; + + CGSize inputImageSize; + GLfloat imageVertices[8]; + GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha; +} + +// Initialization and teardown +- (void)commonInit; + +// Managing the display FBOs +- (void)createDisplayFramebuffer; +- (void)destroyDisplayFramebuffer; + +// Handling fill mode +- (void)recalculateViewGeometry; + +@end + +@implementation GPUImageView + +@synthesize sizeInPixels = _sizeInPixels; +@synthesize fillMode = _fillMode; +@synthesize enabled; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithFrame:(CGRect)frame +{ + if (!(self = [super initWithFrame:frame])) + { + return nil; + } + + [self commonInit]; + + return self; +} + +-(id)initWithCoder:(NSCoder *)coder +{ + if (!(self = [super initWithCoder:coder])) + { + return nil; + } + + [self commonInit]; + + return self; +} + +//- (void) prepareOpenGL +//{ +// GLint swapInt = 1; +// [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval]; // set to vbl sync +//} + +- (void)commonInit; +{ + // I believe each of these views needs a separate OpenGL context, unlike on iOS where you're rendering to an FBO in a
layer +// NSOpenGLPixelFormatAttribute pixelFormatAttributes[] = { +// NSOpenGLPFADoubleBuffer, +// NSOpenGLPFAAccelerated, 0, +// 0 +// }; +// +// NSOpenGLPixelFormat *pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:pixelFormatAttributes]; +// if (pixelFormat == nil) +// { +// NSLog(@"Error: No appropriate pixel format found"); +// } +// // TODO: Take into account the sharegroup +// NSOpenGLContext *context = [[NSOpenGLContext alloc] initWithFormat:pixelFormat shareContext:[[GPUImageContext sharedImageProcessingContext] context]]; +// if (context == nil) +// { +// NSAssert(NO, @"Problem creating the GPUImageView context"); +// } +// [self setOpenGLContext:context]; + [self setOpenGLContext:[[GPUImageContext sharedImageProcessingContext] context]]; + + + inputRotation = kGPUImageNoRotation; + self.hidden = NO; + + self.enabled = YES; + + runSynchronouslyOnVideoProcessingQueue(^{ + [self.openGLContext makeCurrentContext]; + displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + +// displayProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + if (!displayProgram.initialized) + { + [displayProgram addAttribute:@"position"]; + [displayProgram addAttribute:@"inputTextureCoordinate"]; + + if (![displayProgram link]) + { + NSString *progLog = [displayProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [displayProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [displayProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + displayProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + displayPositionAttribute = [displayProgram attributeIndex:@"position"]; + displayTextureCoordinateAttribute = [displayProgram attributeIndex:@"inputTextureCoordinate"]; + displayInputTextureUniform = [displayProgram uniformIndex:@"inputImageTexture"]; + + [GPUImageContext setActiveShaderProgram:displayProgram]; + +// [displayProgram use]; + glEnableVertexAttribArray(displayPositionAttribute); + glEnableVertexAttribArray(displayTextureCoordinateAttribute); + + [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0]; + _fillMode = kGPUImageFillModePreserveAspectRatio; + [self createDisplayFramebuffer]; + }); + +} + +- (void)dealloc +{ +} + +#pragma mark - +#pragma mark Managing the display FBOs + +- (void)createDisplayFramebuffer; +{ + // Perhaps I'll use an FBO at some time later, but for now will render directly to the screen + _sizeInPixels.width = self.bounds.size.width; + _sizeInPixels.height = self.bounds.size.height; + +// NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight); +} + +- (void)destroyDisplayFramebuffer; +{ + [self.openGLContext makeCurrentContext]; +} + +- (void)setDisplayFramebuffer; +{ + glBindFramebuffer(GL_FRAMEBUFFER, 0); + glBindRenderbuffer(GL_RENDERBUFFER, 0); + + glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height); +} + +- (void)presentFramebuffer; +{ + [self.openGLContext flushBuffer]; +} + +- (void)reshape; +{ + if ( (_sizeInPixels.width == self.bounds.size.width) && (_sizeInPixels.height == self.bounds.size.height) ) + { + return; + } + + _sizeInPixels.width = self.bounds.size.width; + _sizeInPixels.height = self.bounds.size.height; + [self recalculateViewGeometry]; + 
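+ // The redraw below is dispatched rather than run inline; -reshape arrives on the main thread
+ // during live window resizing, and -newFrameReadyAtTime:atIndex: renders synchronously on the
+ // video processing queue, so dispatching presumably keeps the AppKit resize loop from blocking
+ // on that render.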
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ + [self newFrameReadyAtTime:kCMTimeInvalid atIndex:0]; + }); +} + +#pragma mark - +#pragma mark Handling fill mode + +- (void)recalculateViewGeometry; +{ +// runSynchronouslyOnVideoProcessingQueue(^{ + CGFloat heightScaling, widthScaling; + + CGSize currentViewSize = self.bounds.size; + + // CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height; + // CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height; + + CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds); + + switch(_fillMode) + { + case kGPUImageFillModeStretch: + { + widthScaling = 1.0; + heightScaling = 1.0; + }; break; + case kGPUImageFillModePreserveAspectRatio: + { + widthScaling = insetRect.size.width / currentViewSize.width; + heightScaling = insetRect.size.height / currentViewSize.height; + }; break; + case kGPUImageFillModePreserveAspectRatioAndFill: + { + // CGFloat widthHolder = insetRect.size.width / currentViewSize.width; + widthScaling = currentViewSize.height / insetRect.size.height; + heightScaling = currentViewSize.width / insetRect.size.width; + }; break; + } + + imageVertices[0] = -widthScaling; + imageVertices[1] = -heightScaling; + imageVertices[2] = widthScaling; + imageVertices[3] = -heightScaling; + imageVertices[4] = -widthScaling; + imageVertices[5] = heightScaling; + imageVertices[6] = widthScaling; + imageVertices[7] = heightScaling; +// }); + +// static const GLfloat imageVertices[] = { +// -1.0f, -1.0f, +// 1.0f, -1.0f, +// -1.0f, 1.0f, +// 1.0f, 1.0f, +// }; +} + +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +{ + backgroundColorRed = redComponent; + backgroundColorGreen = greenComponent; + backgroundColorBlue = blueComponent; + backgroundColorAlpha = alphaComponent; +} + ++ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode; +{ +// static const GLfloat noRotationTextureCoordinates[] = { +// 0.0f, 0.0f, +// 1.0f, 0.0f, +// 0.0f, 1.0f, +// 1.0f, 1.0f, +// }; + + static const GLfloat noRotationTextureCoordinates[] = { + 0.0f, 1.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 0.0f, + }; + + static const GLfloat rotateRightTextureCoordinates[] = { + 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotateLeftTextureCoordinates[] = { + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f, + }; + + static const GLfloat verticalFlipTextureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat horizontalFlipTextureCoordinates[] = { + 1.0f, 1.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = { + 1.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 0.0f, 1.0f, + }; + + static const GLfloat rotate180TextureCoordinates[] = { + 1.0f, 0.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 1.0f, + }; + + switch(rotationMode) + { + case kGPUImageNoRotation: return noRotationTextureCoordinates; + case kGPUImageRotateLeft: return rotateLeftTextureCoordinates; + case kGPUImageRotateRight: return rotateRightTextureCoordinates; + case kGPUImageFlipVertical: return verticalFlipTextureCoordinates; + case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates; + case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates; + case kGPUImageRotate180: return rotate180TextureCoordinates; + } +} + 
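+// Note: the switch above has no branch for kGPUImageRotateRightFlipHorizontal, so control can run
+// off the end of this non-void method for that rotation mode. A sketch of the missing branch — the
+// coordinate values here are an assumption, derived as the one corner ordering not yet covered by
+// the seven arrays above, and are not confirmed by this source:
+//
+//    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
+//        0.0f, 1.0f,
+//        0.0f, 0.0f,
+//        1.0f, 1.0f,
+//        1.0f, 0.0f,
+//    };
+//
+//    case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;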
+#pragma mark - +#pragma mark GPUInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + runSynchronouslyOnVideoProcessingQueue(^{ +// [[self openGLContext] makeCurrentContext]; + [GPUImageContext setActiveShaderProgram:displayProgram]; + [self setDisplayFramebuffer]; +// [displayProgram use]; + +// glMatrixMode(GL_MODELVIEW); +// glLoadIdentity(); +// +// glMatrixMode(GL_PROJECTION); +// glLoadIdentity(); + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay); + glUniform1i(displayInputTextureUniform, 4); + + glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices); + glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageView textureCoordinatesForRotation:inputRotation]); + + [self lockFocus]; + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + [self presentFramebuffer]; + glBindTexture(GL_TEXTURE_2D, 0); + [self unlockFocus]; + }); +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + inputTextureForDisplay = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + CGSize rotatedSize = newSize; + + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + rotatedSize.width = newSize.height; + rotatedSize.height = newSize.width; + } + + if (!CGSizeEqualToSize(inputImageSize, rotatedSize)) + { + inputImageSize = rotatedSize; + [self recalculateViewGeometry]; + } + }); +} + +- (CGSize)maximumOutputSize; +{ + if ([self respondsToSelector:@selector(setContentScaleFactor:)]) + { + CGSize pointSize = self.bounds.size; + // TODO: Account for Retina displays + return pointSize; +// return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height); + } + else + { + return self.bounds.size; + } +} + +- (void)endProcessing +{ +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +#pragma mark - +#pragma mark Accessors + +- (CGSize)sizeInPixels; +{ + if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero)) + { + return [self maximumOutputSize]; + } + else + { + return _sizeInPixels; + } +} + +- (void)setFillMode:(GPUImageFillModeType)newValue; +{ + _fillMode = newValue; + [self recalculateViewGeometry]; +} + +@end diff --git a/GPUImage/Source/Mac/en.lproj/InfoPlist.strings b/GPUImage/Source/Mac/en.lproj/InfoPlist.strings new file mode 100644 index 0000000..477b28f --- /dev/null +++ b/GPUImage/Source/Mac/en.lproj/InfoPlist.strings @@ -0,0 +1,2 @@ +/* Localized versions of Info.plist keys */ + diff --git a/GPUImage/Source/iOS/GPUImage-Prefix.pch b/GPUImage/Source/iOS/GPUImage-Prefix.pch new file mode 100755 index 0000000..ab220b4 --- /dev/null +++ b/GPUImage/Source/iOS/GPUImage-Prefix.pch @@ -0,0 +1,7 @@ +// +// Prefix header for all source files of the 'GPUImage' target in the 
'GPUImage' project +// + +#ifdef __OBJC__ + #import <Foundation/Foundation.h> +#endif diff --git a/GPUImage/Source/iOS/GPUImageContext.h b/GPUImage/Source/iOS/GPUImageContext.h new file mode 100755 index 0000000..4491ccf --- /dev/null +++ b/GPUImage/Source/iOS/GPUImageContext.h @@ -0,0 +1,63 @@ +#import <Foundation/Foundation.h> +#import <OpenGLES/EAGL.h> +#import <OpenGLES/ES2/gl.h> +#import <OpenGLES/ES2/glext.h> +#import <QuartzCore/QuartzCore.h> +#import <CoreMedia/CoreMedia.h> +#import "GLProgram.h" + +#define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal) + +typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode; + +@interface GPUImageContext : NSObject + +@property(readonly, nonatomic) dispatch_queue_t contextQueue; +@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram; +@property(readonly, retain, nonatomic) EAGLContext *context; + ++ (void *)contextKey; ++ (GPUImageContext *)sharedImageProcessingContext; ++ (dispatch_queue_t)sharedContextQueue; ++ (void)useImageProcessingContext; ++ (void)setActiveShaderProgram:(GLProgram *)shaderProgram; ++ (GLint)maximumTextureSizeForThisDevice; ++ (GLint)maximumTextureUnitsForThisDevice; ++ (GLint)maximumVaryingVectorsForThisDevice; ++ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension; ++ (BOOL)deviceSupportsRedTextures; ++ (BOOL)deviceSupportsFramebufferReads; ++ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize; + +- (void)presentBufferForDisplay; +- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString; + +- (void)useSharegroup:(EAGLSharegroup *)sharegroup; + +// Manage fast texture upload ++ (BOOL)supportsFastTextureUpload; + +@end + +@protocol GPUImageTextureDelegate; + +@protocol GPUImageInput <NSObject> +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex; +- (NSInteger)nextAvailableTextureIndex; +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +- (CGSize)maximumOutputSize; +- (void)endProcessing; +- (BOOL)shouldIgnoreUpdatesToThisTarget; +- (BOOL)enabled; +- (void)conserveMemoryForNextFrame; +- (BOOL)wantsMonochromeInput; +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +@end + +@protocol GPUImageTextureDelegate +- (void)textureNoLongerNeededForTarget:(id<GPUImageInput>)textureTarget; +@end + diff --git a/GPUImage/Source/iOS/GPUImageContext.m b/GPUImage/Source/iOS/GPUImageContext.m new file mode 100755 index 0000000..6f1d608 --- /dev/null +++ b/GPUImage/Source/iOS/GPUImageContext.m @@ -0,0 +1,267 @@ +#import "GPUImageContext.h" +#import <OpenGLES/EAGLDrawable.h> +#import <AVFoundation/AVFoundation.h> + +#define MAXSHADERPROGRAMSALLOWEDINCACHE 40 + +@interface GPUImageContext() +{ + NSMutableDictionary *shaderProgramCache; + NSMutableArray *shaderProgramUsageHistory; + EAGLSharegroup *_sharegroup; +} + +@end + +@implementation GPUImageContext + +@synthesize context = _context; +@synthesize currentShaderProgram = _currentShaderProgram; +@synthesize contextQueue = _contextQueue; + +static void *openGLESContextQueueKey; + +- (id)init; +{ + if (!(self = [super init])) + { + return nil; + } + +
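+ // Storing the static variable's own address in it gives a key whose value is unique per process;
+ // dispatch_queue_set_specific() tags the context queue with it below, so code elsewhere can
+ // presumably call dispatch_get_specific([GPUImageContext contextKey]) to detect that it is
+ // already on the contextQueue and run work inline instead of deadlocking in dispatch_sync().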
openGLESContextQueueKey = &openGLESContextQueueKey; + _contextQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.openGLESContextQueue", NULL); + +#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0)) +#else + dispatch_queue_set_specific(_contextQueue, openGLESContextQueueKey, (__bridge void *)self, NULL); +#endif + shaderProgramCache = [[NSMutableDictionary alloc] init]; + shaderProgramUsageHistory = [[NSMutableArray alloc] init]; + + return self; +} + ++ (void *)contextKey { + return openGLESContextQueueKey; +} + +// Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html ++ (GPUImageContext *)sharedImageProcessingContext; +{ + static dispatch_once_t pred; + static GPUImageContext *sharedImageProcessingContext = nil; + + dispatch_once(&pred, ^{ + sharedImageProcessingContext = [[[self class] alloc] init]; + }); + return sharedImageProcessingContext; +} + ++ (dispatch_queue_t)sharedContextQueue; +{ + return [[self sharedImageProcessingContext] contextQueue]; +} + ++ (void)useImageProcessingContext; +{ + EAGLContext *imageProcessingContext = [[GPUImageContext sharedImageProcessingContext] context]; + if ([EAGLContext currentContext] != imageProcessingContext) + { + [EAGLContext setCurrentContext:imageProcessingContext]; + } +} + ++ (void)setActiveShaderProgram:(GLProgram *)shaderProgram; +{ + GPUImageContext *sharedContext = [GPUImageContext sharedImageProcessingContext]; + EAGLContext *imageProcessingContext = [sharedContext context]; + if ([EAGLContext currentContext] != imageProcessingContext) + { + [EAGLContext setCurrentContext:imageProcessingContext]; + } + + if (sharedContext.currentShaderProgram != shaderProgram) + { + sharedContext.currentShaderProgram = shaderProgram; + [shaderProgram use]; + } +} + ++ (GLint)maximumTextureSizeForThisDevice; +{ + static dispatch_once_t pred; + static GLint maxTextureSize = 0; + + dispatch_once(&pred, ^{ + [self useImageProcessingContext]; + glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize); + }); + + return maxTextureSize; +} + ++ (GLint)maximumTextureUnitsForThisDevice; +{ + static dispatch_once_t pred; + static GLint maxTextureUnits = 0; + + dispatch_once(&pred, ^{ + [self useImageProcessingContext]; + glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits); + }); + + return maxTextureUnits; +} + ++ (GLint)maximumVaryingVectorsForThisDevice; +{ + static dispatch_once_t pred; + static GLint maxVaryingVectors = 0; + + dispatch_once(&pred, ^{ + [self useImageProcessingContext]; + glGetIntegerv(GL_MAX_VARYING_VECTORS, &maxVaryingVectors); + }); + + return maxVaryingVectors; +} + ++ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension; +{ + static dispatch_once_t pred; + static NSArray *extensionNames = nil; + + // Cache extensions for later quick reference, since this won't change for a given device + dispatch_once(&pred, ^{ + [GPUImageContext useImageProcessingContext]; + NSString *extensionsString = [NSString stringWithCString:(const char *)glGetString(GL_EXTENSIONS) encoding:NSASCIIStringEncoding]; + extensionNames = [extensionsString componentsSeparatedByString:@" "]; + }); + + return [extensionNames containsObject:extension]; +} + + +// http://www.khronos.org/registry/gles/extensions/EXT/EXT_texture_rg.txt + ++ (BOOL)deviceSupportsRedTextures; +{ + static dispatch_once_t pred; + static BOOL supportsRedTextures = NO; + + dispatch_once(&pred, ^{ + supportsRedTextures = [GPUImageContext 
deviceSupportsOpenGLESExtension:@"GL_EXT_texture_rg"]; + }); + + return supportsRedTextures; +} + ++ (BOOL)deviceSupportsFramebufferReads; +{ + static dispatch_once_t pred; + static BOOL supportsFramebufferReads = NO; + + dispatch_once(&pred, ^{ + supportsFramebufferReads = [GPUImageContext deviceSupportsOpenGLESExtension:@"GL_EXT_shader_framebuffer_fetch"]; + }); + + return supportsFramebufferReads; +} + ++ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize; +{ + GLint maxTextureSize = [self maximumTextureSizeForThisDevice]; + if ( (inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize) ) + { + return inputSize; + } + + CGSize adjustedSize; + if (inputSize.width > inputSize.height) + { + adjustedSize.width = (CGFloat)maxTextureSize; + adjustedSize.height = ((CGFloat)maxTextureSize / inputSize.width) * inputSize.height; + } + else + { + adjustedSize.height = (CGFloat)maxTextureSize; + adjustedSize.width = ((CGFloat)maxTextureSize / inputSize.height) * inputSize.width; + } + + return adjustedSize; +} + +- (void)presentBufferForDisplay; +{ + [self.context presentRenderbuffer:GL_RENDERBUFFER]; +} + +- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString; +{ + NSString *lookupKeyForShaderProgram = [NSString stringWithFormat:@"V: %@ - F: %@", vertexShaderString, fragmentShaderString]; + GLProgram *programFromCache = [shaderProgramCache objectForKey:lookupKeyForShaderProgram]; + + if (programFromCache == nil) + { + programFromCache = [[GLProgram alloc] initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]; + [shaderProgramCache setObject:programFromCache forKey:lookupKeyForShaderProgram]; +// [shaderProgramUsageHistory addObject:lookupKeyForShaderProgram]; +// if ([shaderProgramUsageHistory count] >= MAXSHADERPROGRAMSALLOWEDINCACHE) +// { +// for (NSUInteger currentShaderProgramRemovedFromCache = 0; currentShaderProgramRemovedFromCache < 10; currentShaderProgramRemovedFromCache++) +// { +// NSString *shaderProgramToRemoveFromCache = [shaderProgramUsageHistory objectAtIndex:0]; +// [shaderProgramUsageHistory removeObjectAtIndex:0]; +// [shaderProgramCache removeObjectForKey:shaderProgramToRemoveFromCache]; +// } +// } + } + + return programFromCache; +} + +- (void)useSharegroup:(EAGLSharegroup *)sharegroup; +{ + NSAssert(_context == nil, @"Unable to use a share group when the context has already been created. Call this method before you use the context for the first time."); + + _sharegroup = sharegroup; +} + +- (EAGLContext *)createContext; +{ + EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:_sharegroup]; + NSAssert(context != nil, @"Unable to create an OpenGL ES 2.0 context. 
The GPUImage framework requires OpenGL ES 2.0 support to work.");
+    return context;
+}
+
+
+#pragma mark -
+#pragma mark Manage fast texture upload
+
++ (BOOL)supportsFastTextureUpload;
+{
+#if TARGET_IPHONE_SIMULATOR
+    return NO;
+#else
+    return (CVOpenGLESTextureCacheCreate != NULL);
+#endif
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (EAGLContext *)context;
+{
+    if (_context == nil)
+    {
+        _context = [self createContext];
+        [EAGLContext setCurrentContext:_context];
+
+        // Set up a few global settings for the image processing pipeline
+        glDisable(GL_DEPTH_TEST);
+    }
+
+    return _context;
+}
+
+@end
diff --git a/GPUImage/Source/iOS/GPUImageMovieWriter.h b/GPUImage/Source/iOS/GPUImageMovieWriter.h
new file mode 100755
index 0000000..9662146
--- /dev/null
+++ b/GPUImage/Source/iOS/GPUImageMovieWriter.h
@@ -0,0 +1,68 @@
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+#import "GPUImageContext.h"
+
+extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;
+
+@protocol GPUImageMovieWriterDelegate
+
+@optional
+- (void)movieRecordingCompleted;
+- (void)movieRecordingFailedWithError:(NSError*)error;
+
+@end
+
+@interface GPUImageMovieWriter : NSObject <GPUImageInput>
+{
+    BOOL alreadyFinishedRecording;
+
+    NSURL *movieURL;
+    NSString *fileType;
+    AVAssetWriter *assetWriter;
+    AVAssetWriterInput *assetWriterAudioInput;
+    AVAssetWriterInput *assetWriterVideoInput;
+    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
+    dispatch_queue_t movieWritingQueue;
+
+    CVOpenGLESTextureCacheRef coreVideoTextureCache;
+    CVPixelBufferRef renderTarget;
+    CVOpenGLESTextureRef renderTexture;
+
+    CGSize videoSize;
+    GPUImageRotationMode inputRotation;
+
+    __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate;
+}
+
+@property(readwrite, nonatomic) BOOL hasAudioTrack;
+@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;
+@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;
+@property(nonatomic, copy) void(^completionBlock)(void);
+@property(nonatomic, copy) void(^failureBlock)(NSError*);
+@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;
+@property(readwrite, nonatomic) BOOL encodingLiveVideo;
+@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);
+@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);
+@property(nonatomic) BOOL enabled;
+@property(nonatomic, readonly) AVAssetWriter *assetWriter;
+@property(nonatomic, readonly) CMTime duration;
+@property(nonatomic, assign) CGAffineTransform transform;
+@property(nonatomic, copy) NSArray *metaData;
+@property(nonatomic, assign, getter = isPaused) BOOL paused;
+
+// Initialization and teardown
+- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
+- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;
+
+- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;
+
+// Movie recording
+- (void)startRecording;
+- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
+- (void)finishRecording;
+- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
+- (void)cancelRecording;
+- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
+- (void)enableSynchronizationCallbacks;
+
+@end
diff --git a/GPUImage/Source/iOS/GPUImageMovieWriter.m b/GPUImage/Source/iOS/GPUImageMovieWriter.m
new file mode 100755
index 0000000..fdbbef5
--- /dev/null
+++ b/GPUImage/Source/iOS/GPUImageMovieWriter.m
@@ -0,0 +1,887 @@
+#import "GPUImageMovieWriter.h"
+
+#import "GPUImageContext.h"
+#import "GLProgram.h"
+#import "GPUImageFilter.h" + +NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING +( + varying highp vec2 textureCoordinate; + + uniform sampler2D inputImageTexture; + + void main() + { + gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra; + } +); + + +@interface GPUImageMovieWriter () +{ + GLuint movieFramebuffer, movieRenderbuffer; + + GLProgram *colorSwizzlingProgram; + GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute; + GLint colorSwizzlingInputTextureUniform; + + GLuint inputTextureForMovieRendering; + + CMTime startTime, previousFrameTime, previousAudioTime; + + dispatch_queue_t audioQueue, videoQueue; + BOOL audioEncodingIsFinished, videoEncodingIsFinished; + + BOOL isRecording; +} + +// Movie recording +- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings; + +// Frame rendering +- (void)createDataFBO; +- (void)destroyDataFBO; +- (void)setFilterFBO; + +- (void)renderAtInternalSize; + +@end + +@implementation GPUImageMovieWriter + +@synthesize hasAudioTrack = _hasAudioTrack; +@synthesize encodingLiveVideo = _encodingLiveVideo; +@synthesize shouldPassthroughAudio = _shouldPassthroughAudio; +@synthesize completionBlock; +@synthesize failureBlock; +@synthesize videoInputReadyCallback; +@synthesize audioInputReadyCallback; +@synthesize enabled; +@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone; +@synthesize paused = _paused; + +@synthesize delegate = _delegate; + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize; +{ + return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil]; +} + +- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings; +{ + if (!(self = [super init])) + { + return nil; + } + + _shouldInvalidateAudioSampleWhenDone = NO; + + self.enabled = YES; + alreadyFinishedRecording = NO; + videoEncodingIsFinished = NO; + audioEncodingIsFinished = NO; + + movieWritingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.movieWritingQueue", NULL); + + videoSize = newSize; + movieURL = newMovieURL; + fileType = newFileType; + startTime = kCMTimeInvalid; + _encodingLiveVideo = [[outputSettings objectForKey:@"EncodingLiveVideo"] isKindOfClass:[NSNumber class]] ? 
[[outputSettings objectForKey:@"EncodingLiveVideo"] boolValue] : YES; + previousFrameTime = kCMTimeNegativeInfinity; + previousAudioTime = kCMTimeNegativeInfinity; + inputRotation = kGPUImageNoRotation; + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if ([GPUImageContext supportsFastTextureUpload]) + { + colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + } + else + { + colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString]; + } + + if (!colorSwizzlingProgram.initialized) + { + [colorSwizzlingProgram addAttribute:@"position"]; + [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"]; + + if (![colorSwizzlingProgram link]) + { + NSString *progLog = [colorSwizzlingProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [colorSwizzlingProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + colorSwizzlingProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"]; + colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"]; + colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"]; + + // REFACTOR: Wrap this in a block for the image processing queue + [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram]; + + glEnableVertexAttribArray(colorSwizzlingPositionAttribute); + glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute); + }); + + [self initializeMovieWithOutputSettings:outputSettings]; + + return self; +} + +- (void)dealloc; +{ + [self destroyDataFBO]; + +#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) ) + if (movieWritingQueue != NULL) + { + dispatch_release(movieWritingQueue); + } + if( audioQueue != NULL ) + { + dispatch_release(audioQueue); + } + if( videoQueue != NULL ) + { + dispatch_release(videoQueue); + } +#endif +} + +#pragma mark - +#pragma mark Movie recording + +- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings; +{ + isRecording = NO; + + self.enabled = YES; + NSError *error = nil; + assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error]; + if (error != nil) + { + NSLog(@"Error: %@", error); + if (failureBlock) + { + failureBlock(error); + } + else + { + if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)]) + { + [self.delegate movieRecordingFailedWithError:error]; + } + } + } + + // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. 
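+    // CMTimeMakeWithSeconds(1.0, 1000) expresses a one-second interval on a
+    // 1000-units-per-second timescale, so the writer closes off a playable
+    // movie fragment roughly once per second while recording.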
+ assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000); + + // use default output settings if none specified + if (outputSettings == nil) + { + NSMutableDictionary *settings = [[NSMutableDictionary alloc] init]; + [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey]; + [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey]; + [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey]; + outputSettings = settings; + } + // custom output settings specified + else + { + NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey]; + NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey]; + NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey]; + + NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters."); + + if( [outputSettings objectForKey:@"EncodingLiveVideo"] ) { + NSMutableDictionary *tmp = [outputSettings mutableCopy]; + [tmp removeObjectForKey:@"EncodingLiveVideo"]; + outputSettings = tmp; + } + } + + /* + NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey, + [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey, + [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey, + [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey, + nil]; + + NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey, + [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey, + nil]; + + NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init]; + [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey]; + [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey]; + [compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey]; + [compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey]; + [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey]; + + [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey]; + */ + + assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings]; + assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo; + + // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
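+    // With the fast texture-upload path, the adaptor's pixel buffer pool hands back
+    // kCVPixelFormatType_32BGRA buffers that the GL texture cache can render into
+    // directly; without it, the swizzling shader reorders the channels so that
+    // glReadPixels' RGBA output lands in the BGRA byte order requested below.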
+ NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, + [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey, + [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey, + nil]; +// NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, +// nil]; + + assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary]; + + [assetWriter addInput:assetWriterVideoInput]; +} + +- (void)startRecording; +{ + alreadyFinishedRecording = NO; + isRecording = YES; + startTime = kCMTimeInvalid; + dispatch_sync(movieWritingQueue, ^{ + if (audioInputReadyCallback == NULL) + { + [assetWriter startWriting]; + } + }); + // [assetWriter startSessionAtSourceTime:kCMTimeZero]; +} + +- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform; +{ + assetWriterVideoInput.transform = orientationTransform; + + [self startRecording]; +} + +- (void)cancelRecording; +{ + if (assetWriter.status == AVAssetWriterStatusCompleted) + { + return; + } + + isRecording = NO; + dispatch_sync(movieWritingQueue, ^{ + alreadyFinishedRecording = YES; + + if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished ) + { + videoEncodingIsFinished = YES; + [assetWriterVideoInput markAsFinished]; + } + if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished ) + { + audioEncodingIsFinished = YES; + [assetWriterAudioInput markAsFinished]; + } + [assetWriter cancelWriting]; + }); +} + +- (void)finishRecording; +{ + [self finishRecordingWithCompletionHandler:NULL]; +} + +- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + + dispatch_sync(movieWritingQueue, ^{ + isRecording = NO; + + if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown) + { + if (handler) + runAsynchronouslyOnVideoProcessingQueue(handler); + return; + } + if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished ) + { + videoEncodingIsFinished = YES; + [assetWriterVideoInput markAsFinished]; + } + if( assetWriter.status == AVAssetWriterStatusWriting && ! 
audioEncodingIsFinished ) + { + audioEncodingIsFinished = YES; + [assetWriterAudioInput markAsFinished]; + } +#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0)) + // Not iOS 6 SDK + [assetWriter finishWriting]; + if (handler) + runAsynchronouslyOnVideoProcessingQueue(handler); +#else + // iOS 6 SDK + if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) { + // Running iOS 6 + [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })]; + } + else { + // Not running iOS 6 +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + [assetWriter finishWriting]; +#pragma clang diagnostic pop + if (handler) + runAsynchronouslyOnVideoProcessingQueue(handler); + } +#endif + }); + }); +} + +- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer; +{ + if (!isRecording) + { + return; + } + + if (_hasAudioTrack) + { + CFRetain(audioBuffer); + + CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer); + + if (CMTIME_IS_INVALID(startTime)) + { + dispatch_sync(movieWritingQueue, ^{ + if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting)) + { + [assetWriter startWriting]; + } + [assetWriter startSessionAtSourceTime:currentSampleTime]; + startTime = currentSampleTime; + }); + } + + if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo) + { + NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime))); + if (_shouldInvalidateAudioSampleWhenDone) + { + CMSampleBufferInvalidate(audioBuffer); + } + CFRelease(audioBuffer); + return; + } + + previousAudioTime = currentSampleTime; + +// NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch); + void(^write)() = ^() { + while( ! assetWriterAudioInput.readyForMoreMediaData && ! _encodingLiveVideo && ! audioEncodingIsFinished ) { + NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5]; + //NSLog(@"audio waiting..."); + [[NSRunLoop currentRunLoop] runUntilDate:maxDate]; + } + if (!assetWriterAudioInput.readyForMoreMediaData) + { + NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime))); + } + else if( ! 
[assetWriterAudioInput appendSampleBuffer:audioBuffer] ) + { + NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime))); + } + else + { + //NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime))); + } + + if (_shouldInvalidateAudioSampleWhenDone) + { + CMSampleBufferInvalidate(audioBuffer); + } + CFRelease(audioBuffer); + }; + if( _encodingLiveVideo ) + dispatch_async(movieWritingQueue, write); + else + write(); + } +} + +- (void)enableSynchronizationCallbacks; +{ + if (videoInputReadyCallback != NULL) + { + if( assetWriter.status != AVAssetWriterStatusWriting ) + { + [assetWriter startWriting]; + } + videoQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.videoReadingQueue", NULL); + [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{ + if( _paused ) + { + //NSLog(@"video requestMediaDataWhenReadyOnQueue paused"); + // if we don't sleep, we'll get called back almost immediately, chewing up CPU + usleep(10000); + return; + } + //NSLog(@"video requestMediaDataWhenReadyOnQueue begin"); + while( assetWriterVideoInput.readyForMoreMediaData && ! _paused ) + { + if( ! videoInputReadyCallback() && ! videoEncodingIsFinished ) + { + dispatch_async(movieWritingQueue, ^{ + if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished ) + { + videoEncodingIsFinished = YES; + [assetWriterVideoInput markAsFinished]; + } + }); + } + } + //NSLog(@"video requestMediaDataWhenReadyOnQueue end"); + }]; + } + + if (audioInputReadyCallback != NULL) + { + audioQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioReadingQueue", NULL); + [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{ + if( _paused ) + { + //NSLog(@"audio requestMediaDataWhenReadyOnQueue paused"); + // if we don't sleep, we'll get called back almost immediately, chewing up CPU + usleep(10000); + return; + } + //NSLog(@"audio requestMediaDataWhenReadyOnQueue begin"); + while( assetWriterAudioInput.readyForMoreMediaData && ! _paused ) + { + if( ! audioInputReadyCallback() && ! audioEncodingIsFinished ) + { + dispatch_async(movieWritingQueue, ^{ + if( assetWriter.status == AVAssetWriterStatusWriting && ! 
audioEncodingIsFinished ) + { + audioEncodingIsFinished = YES; + [assetWriterAudioInput markAsFinished]; + } + }); + } + } + //NSLog(@"audio requestMediaDataWhenReadyOnQueue end"); + }]; + } + +} + +#pragma mark - +#pragma mark Frame rendering + +- (void)createDataFBO; +{ + glActiveTexture(GL_TEXTURE1); + glGenFramebuffers(1, &movieFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer); + + if ([GPUImageContext supportsFastTextureUpload]) + { +#if defined(__IPHONE_6_0) + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#else + CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache); +#endif + + if (err) + { + NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err); + } + + // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ + + + CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget); + + CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget, + NULL, // texture attributes + GL_TEXTURE_2D, + GL_RGBA, // opengl format + (int)videoSize.width, + (int)videoSize.height, + GL_BGRA, // native iOS format + GL_UNSIGNED_BYTE, + 0, + &renderTexture); + + glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture)); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0); + } + else + { + glGenRenderbuffers(1, &movieRenderbuffer); + glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer); + glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height); + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer); + } + + + GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER); + + NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status); +} + +- (void)destroyDataFBO; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + if (movieFramebuffer) + { + glDeleteFramebuffers(1, &movieFramebuffer); + movieFramebuffer = 0; + } + + if (movieRenderbuffer) + { + glDeleteRenderbuffers(1, &movieRenderbuffer); + movieRenderbuffer = 0; + } + + if ([GPUImageContext supportsFastTextureUpload]) + { + if (coreVideoTextureCache) + { + CFRelease(coreVideoTextureCache); + } + + if (renderTexture) + { + CFRelease(renderTexture); + } + if (renderTarget) + { + CVPixelBufferRelease(renderTarget); + } + + } + }); +} + +- (void)setFilterFBO; +{ + if (!movieFramebuffer) + { + [self createDataFBO]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer); + + glViewport(0, 0, (int)videoSize.width, (int)videoSize.height); +} + +- (void)renderAtInternalSize; +{ + [GPUImageContext useImageProcessingContext]; + [self setFilterFBO]; + + [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram]; + + glClearColor(1.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + // This needs to be flipped to write out to video correctly + static const GLfloat squareVertices[] = { + -1.0f, -1.0f, + 1.0f, -1.0f, + -1.0f, 
1.0f, + 1.0f, 1.0f, + }; + + const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation]; + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, inputTextureForMovieRendering); + glUniform1i(colorSwizzlingInputTextureUniform, 4); + + glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices); + glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + glFinish(); +} + +#pragma mark - +#pragma mark GPUImageInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + if (!isRecording) + { + return; + } + + // Drop frames forced by images and other things with no time constants + // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case + if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) ) + { + return; + } + + if (CMTIME_IS_INVALID(startTime)) + { + dispatch_sync(movieWritingQueue, ^{ + if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting)) + { + [assetWriter startWriting]; + } + + [assetWriter startSessionAtSourceTime:frameTime]; + startTime = frameTime; + }); + } + + if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo) + { + NSLog(@"1: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime))); + return; + } + + // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames + [GPUImageContext useImageProcessingContext]; + [self renderAtInternalSize]; + + CVPixelBufferRef pixel_buffer = NULL; + + if ([GPUImageContext supportsFastTextureUpload]) + { + pixel_buffer = renderTarget; + CVPixelBufferLockBaseAddress(pixel_buffer, 0); + } + else + { + CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer); + if ((pixel_buffer == NULL) || (status != kCVReturnSuccess)) + { + CVPixelBufferRelease(pixel_buffer); + return; + } + else + { + CVPixelBufferLockBaseAddress(pixel_buffer, 0); + + GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer); + glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData); + } + } + + void(^write)() = ^() { + while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! 
videoEncodingIsFinished ) { + NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1]; + //NSLog(@"video waiting..."); + [[NSRunLoop currentRunLoop] runUntilDate:maxDate]; + } + if (!assetWriterVideoInput.readyForMoreMediaData) + { + NSLog(@"2: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime))); + } + else if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime]) + { + NSLog(@"Problem appending pixel buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime))); + } + else + { + //NSLog(@"Wrote a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime))); + } + CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); + + previousFrameTime = frameTime; + + if (![GPUImageContext supportsFastTextureUpload]) + { + CVPixelBufferRelease(pixel_buffer); + } + }; + + if( _encodingLiveVideo ) + dispatch_async(movieWritingQueue, write); + else + write(); +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + inputTextureForMovieRendering = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ +} + +- (CGSize)maximumOutputSize; +{ + return videoSize; +} + +- (void)endProcessing +{ + if (completionBlock) + { + if (!alreadyFinishedRecording) + { + alreadyFinishedRecording = YES; + completionBlock(); + } + } + else + { + if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)]) + { + [_delegate movieRecordingCompleted]; + } + } +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +#pragma mark - +#pragma mark Accessors + +- (void)setHasAudioTrack:(BOOL)newValue +{ + [self setHasAudioTrack:newValue audioSettings:nil]; +} + +- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings; +{ + _hasAudioTrack = newValue; + + if (_hasAudioTrack) + { + if (_shouldPassthroughAudio) + { + // Do not set any settings so audio will be the same as passthrough + audioOutputSettings = nil; + } + else if (audioOutputSettings == nil) + { + double preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate]; + + AudioChannelLayout acl; + bzero( &acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + + audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey, + [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, + [ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey, + [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey, + //[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey, + [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey, + nil]; +/* + AudioChannelLayout acl; + bzero( &acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + + audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys: + [ NSNumber 
numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey, + [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, + [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey, + [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey, + [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey, + nil];*/ + } + + assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings]; + [assetWriter addInput:assetWriterAudioInput]; + assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo; + } + else + { + // Remove audio track if it exists + } +} + +- (NSArray*)metaData { + return assetWriter.metadata; +} + +- (void)setMetaData:(NSArray*)metaData { + assetWriter.metadata = metaData; +} + +- (CMTime)duration { + if( ! CMTIME_IS_VALID(startTime) ) + return kCMTimeZero; + if( ! CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime) ) + return CMTimeSubtract(previousFrameTime, startTime); + if( ! CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime) ) + return CMTimeSubtract(previousAudioTime, startTime); + return kCMTimeZero; +} + +- (CGAffineTransform)transform { + return assetWriterVideoInput.transform; +} + +- (void)setTransform:(CGAffineTransform)transform { + assetWriterVideoInput.transform = transform; +} + +- (AVAssetWriter*)assetWriter { + return assetWriter; +} + +@end diff --git a/GPUImage/Source/iOS/GPUImagePicture.h b/GPUImage/Source/iOS/GPUImagePicture.h new file mode 100755 index 0000000..6274642 --- /dev/null +++ b/GPUImage/Source/iOS/GPUImagePicture.h @@ -0,0 +1,33 @@ +#import +#import "GPUImageOutput.h" + + +@interface GPUImagePicture : GPUImageOutput +{ + CGSize pixelSizeOfImage; + BOOL hasProcessedImage; + + dispatch_semaphore_t imageUpdateSemaphore; +} + +// Initialization and teardown +- (id)initWithURL:(NSURL *)url; +- (id)initWithImage:(UIImage *)newImageSource; +- (id)initWithCGImage:(CGImageRef)newImageSource; +- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; + +// Image rendering +- (void)processImage; +- (CGSize)outputImageSize; + +/** + * Process image with all targets and filters asynchronously + * The completion handler is called after processing finished in the + * GPU's dispatch queue - and only if this method did not return NO. 
+ * + * @returns NO if resource is blocked and processing is discarded, YES otherwise + */ +- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion; + +@end diff --git a/GPUImage/Source/iOS/GPUImagePicture.m b/GPUImage/Source/iOS/GPUImagePicture.m new file mode 100755 index 0000000..edea1df --- /dev/null +++ b/GPUImage/Source/iOS/GPUImagePicture.m @@ -0,0 +1,292 @@ +#import "GPUImagePicture.h" + +@implementation GPUImagePicture + +#pragma mark - +#pragma mark Initialization and teardown + +- (id)initWithURL:(NSURL *)url; +{ + NSData *imageData = [[NSData alloc] initWithContentsOfURL:url]; + + if (!(self = [self initWithData:imageData])) + { + return nil; + } + + return self; +} + +- (id)initWithData:(NSData *)imageData; +{ + UIImage *inputImage = [[UIImage alloc] initWithData:imageData]; + + if (!(self = [self initWithImage:inputImage])) + { + return nil; + } + + return self; +} + +- (id)initWithImage:(UIImage *)newImageSource; +{ + if (!(self = [self initWithImage:newImageSource smoothlyScaleOutput:NO])) + { + return nil; + } + + return self; +} + +- (id)initWithCGImage:(CGImageRef)newImageSource; +{ + if (!(self = [self initWithCGImage:newImageSource smoothlyScaleOutput:NO])) + { + return nil; + } + return self; +} + +- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +{ + return [self initWithCGImage:[newImageSource CGImage] smoothlyScaleOutput:smoothlyScaleOutput]; +} + +- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput; +{ + if (!(self = [super init])) + { + return nil; + } + + hasProcessedImage = NO; + self.shouldSmoothlyScaleOutput = smoothlyScaleOutput; + imageUpdateSemaphore = dispatch_semaphore_create(1); + + // TODO: Dispatch this whole thing asynchronously to move image loading off main thread + CGFloat widthOfImage = CGImageGetWidth(newImageSource); + CGFloat heightOfImage = CGImageGetHeight(newImageSource); + + // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. + NSAssert( widthOfImage > 0 && heightOfImage > 0, @"Passed image must not be empty - it should be at least 1px tall and wide"); + + pixelSizeOfImage = CGSizeMake(widthOfImage, heightOfImage); + CGSize pixelSizeToUseForTexture = pixelSizeOfImage; + + BOOL shouldRedrawUsingCoreGraphics = NO; + + // For now, deal with images larger than the maximum texture size by resizing to be within that limit + CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:pixelSizeOfImage]; + if (!CGSizeEqualToSize(scaledImageSizeToFitOnGPU, pixelSizeOfImage)) + { + pixelSizeOfImage = scaledImageSizeToFitOnGPU; + pixelSizeToUseForTexture = pixelSizeOfImage; + shouldRedrawUsingCoreGraphics = YES; + } + + if (self.shouldSmoothlyScaleOutput) + { + // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill + CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width)); + CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height)); + + pixelSizeToUseForTexture = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight)); + + shouldRedrawUsingCoreGraphics = YES; + } + + GLubyte *imageData = NULL; + CFDataRef dataFromImageDataProvider; + GLenum format = GL_BGRA; + + if (!shouldRedrawUsingCoreGraphics) { + /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to + * tell GL about the memory layout with GLES. 
+ */ + if (CGImageGetBytesPerRow(newImageSource) != CGImageGetWidth(newImageSource) * 4 || + CGImageGetBitsPerPixel(newImageSource) != 32 || + CGImageGetBitsPerComponent(newImageSource) != 8) + { + shouldRedrawUsingCoreGraphics = YES; + } else { + /* Check that the bitmap pixel format is compatible with GL */ + CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(newImageSource); + if ((bitmapInfo & kCGBitmapFloatComponents) != 0) { + /* We don't support float components for use directly in GL */ + shouldRedrawUsingCoreGraphics = YES; + } else { + CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask; + if (byteOrderInfo == kCGBitmapByteOrder32Little) { + /* Little endian, for alpha-first we can use this bitmap directly in GL */ + CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask; + if (alphaInfo != kCGImageAlphaPremultipliedFirst && alphaInfo != kCGImageAlphaFirst && + alphaInfo != kCGImageAlphaNoneSkipFirst) { + shouldRedrawUsingCoreGraphics = YES; + } + } else if (byteOrderInfo == kCGBitmapByteOrderDefault || byteOrderInfo == kCGBitmapByteOrder32Big) { + /* Big endian, for alpha-last we can use this bitmap directly in GL */ + CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask; + if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast && + alphaInfo != kCGImageAlphaNoneSkipLast) { + shouldRedrawUsingCoreGraphics = YES; + } else { + /* Can access directly using GL_RGBA pixel format */ + format = GL_RGBA; + } + } + } + } + } + + // CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent(); + + if (shouldRedrawUsingCoreGraphics) + { + // For resized or incompatible image: redraw + imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4); + + CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB(); + + CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); + // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html + CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource); + CGContextRelease(imageContext); + CGColorSpaceRelease(genericRGBColorspace); + } + else + { + // Access the raw image bytes directly + dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource)); + imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider); + } + + // elapsedTime = (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0; + // NSLog(@"Core Graphics drawing time: %f", elapsedTime); + + // CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f; + // NSUInteger totalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height); + // + // for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++) + // { + // currentBlueTotal += (CGFloat)imageData[(currentPixel * 4)] / 255.0f; + // currentGreenTotal += (CGFloat)imageData[(currentPixel * 4) + 1] / 255.0f; + // currentRedTotal += (CGFloat)imageData[(currentPixel * 4 + 2)] / 255.0f; + // currentAlphaTotal += (CGFloat)imageData[(currentPixel * 4) + 3] / 255.0f; + // } + // + // NSLog(@"Debug, average input 
image red: %f, green: %f, blue: %f, alpha: %f", currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels); + + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + [self initializeOutputTextureIfNeeded]; + + glBindTexture(GL_TEXTURE_2D, outputTexture); + if (self.shouldSmoothlyScaleOutput) + { + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR); + } + // no need to use self.outputTextureOptions here since pictures need this texture formats and type + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, format, GL_UNSIGNED_BYTE, imageData); + + if (self.shouldSmoothlyScaleOutput) + { + glGenerateMipmap(GL_TEXTURE_2D); + } + glBindTexture(GL_TEXTURE_2D, 0); + }); + + if (shouldRedrawUsingCoreGraphics) + { + free(imageData); + } + else + { + CFRelease(dataFromImageDataProvider); + } + + return self; +} + +// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required. +#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_7_0) || (!defined(__IPHONE_7_0)) ) +- (void)dealloc; +{ + if (imageUpdateSemaphore != NULL) + { + dispatch_release(imageUpdateSemaphore); + } +} +#endif + +#pragma mark - +#pragma mark Image rendering + +- (void)removeAllTargets; +{ + [super removeAllTargets]; + hasProcessedImage = NO; +} + +- (void)processImage; +{ + [self processImageWithCompletionHandler:nil]; +} + +- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion; +{ + hasProcessedImage = YES; + + // dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_FOREVER); + + if (dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_NOW) != 0) + { + return NO; + } + + runAsynchronouslyOnVideoProcessingQueue(^{ + + if (MAX(pixelSizeOfImage.width, pixelSizeOfImage.height) > 1000.0) + { + [self conserveMemoryForNextFrame]; + } + + for (id currentTarget in targets) + { + NSInteger indexOfObject = [targets indexOfObject:currentTarget]; + NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; + + [currentTarget setCurrentlyReceivingMonochromeInput:NO]; + [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget]; +// [currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget]; + [currentTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureIndexOfTarget]; + } + + dispatch_semaphore_signal(imageUpdateSemaphore); + + if (completion != nil) { + completion(); + } + }); + + return YES; +} + +- (CGSize)outputImageSize; +{ + return pixelSizeOfImage; +} + +- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation; +{ + [super addTarget:newTarget atTextureLocation:textureLocation]; + + if (hasProcessedImage) + { + [newTarget setInputSize:pixelSizeOfImage atIndex:textureLocation]; + [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation]; + } +} + +@end \ No newline at end of file diff --git a/GPUImage/Source/iOS/GPUImageView.h b/GPUImage/Source/iOS/GPUImageView.h new file mode 100755 index 0000000..7d0465a --- /dev/null +++ b/GPUImage/Source/iOS/GPUImageView.h @@ -0,0 +1,40 @@ +#import +#import "GPUImageContext.h" + +typedef enum { + kGPUImageFillModeStretch, // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio + 
kGPUImageFillModePreserveAspectRatio, // Maintains the aspect ratio of the source image, adding bars of the specified background color
+    kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
+} GPUImageFillModeType;
+
+/**
+ UIView subclass to use as an endpoint for displaying GPUImage outputs
+ */
+@interface GPUImageView : UIView <GPUImageInput>
+{
+    GPUImageRotationMode inputRotation;
+    __unsafe_unretained id<GPUImageTextureDelegate> textureDelegate;
+}
+
+/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
+ */
+@property(readwrite, nonatomic) GPUImageFillModeType fillMode;
+
+/** This calculates the current display size, in pixels, taking into account Retina scaling factors
+ */
+@property(readonly, nonatomic) CGSize sizeInPixels;
+
+@property(nonatomic) BOOL enabled;
+
+/** Handling fill mode
+
+ @param redComponent Red component for background color
+ @param greenComponent Green component for background color
+ @param blueComponent Blue component for background color
+ @param alphaComponent Alpha component for background color
+ */
+- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
+
+- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
+
+@end
diff --git a/GPUImage/Source/iOS/GPUImageView.m b/GPUImage/Source/iOS/GPUImageView.m
new file mode 100755
index 0000000..d634919
--- /dev/null
+++ b/GPUImage/Source/iOS/GPUImageView.m
@@ -0,0 +1,484 @@
+#import "GPUImageView.h"
+#import <OpenGLES/EAGLDrawable.h>
+#import <QuartzCore/QuartzCore.h>
+#import "GPUImageContext.h"
+#import "GPUImageFilter.h"
+#import <AVFoundation/AVFoundation.h>
+
+#pragma mark -
+#pragma mark Private methods and instance variables
+
+@interface GPUImageView ()
+{
+    GLuint inputTextureForDisplay;
+    GLuint displayRenderbuffer, displayFramebuffer;
+
+    GLProgram *displayProgram;
+    GLint displayPositionAttribute, displayTextureCoordinateAttribute;
+    GLint displayInputTextureUniform;
+
+    CGSize inputImageSize;
+    GLfloat imageVertices[8];
+    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
+}
+
+// Initialization and teardown
+- (void)commonInit;
+
+// Managing the display FBOs
+- (void)createDisplayFramebuffer;
+- (void)destroyDisplayFramebuffer;
+
+// Handling fill mode
+- (void)recalculateViewGeometry;
+
+@end
+
+@implementation GPUImageView
+
+@synthesize sizeInPixels = _sizeInPixels;
+@synthesize fillMode = _fillMode;
+@synthesize enabled;
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
++ (Class)layerClass
+{
+    return [CAEAGLLayer class];
+}
+
+- (id)initWithFrame:(CGRect)frame
+{
+    if (!(self = [super initWithFrame:frame]))
+    {
+        return nil;
+    }
+
+    [self commonInit];
+
+    return self;
+}
+
+-(id)initWithCoder:(NSCoder *)coder
+{
+    if (!(self = [super initWithCoder:coder]))
+    {
+        return nil;
+    }
+
+    [self commonInit];
+
+    return self;
+}
+
+- (void)commonInit;
+{
+    // Set scaling to account for Retina display
+    if ([self respondsToSelector:@selector(setContentScaleFactor:)])
+    {
+        self.contentScaleFactor = [[UIScreen mainScreen] scale];
+    }
+
+    inputRotation = kGPUImageNoRotation;
+    self.opaque = YES;
+    self.hidden = NO;
+    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
+    eaglLayer.opaque = YES;
+    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
+
+    self.enabled = YES;
+
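+    // All of the GL work below has to happen on the shared video-processing queue
+    // with the image-processing context current. programForVertexShaderString: caches
+    // compiled programs, so the passthrough display program is built and linked only
+    // once and then reused by every subsequent GPUImageView.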
runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext useImageProcessingContext]; + + displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString]; + if (!displayProgram.initialized) + { + [displayProgram addAttribute:@"position"]; + [displayProgram addAttribute:@"inputTextureCoordinate"]; + + if (![displayProgram link]) + { + NSString *progLog = [displayProgram programLog]; + NSLog(@"Program link log: %@", progLog); + NSString *fragLog = [displayProgram fragmentShaderLog]; + NSLog(@"Fragment shader compile log: %@", fragLog); + NSString *vertLog = [displayProgram vertexShaderLog]; + NSLog(@"Vertex shader compile log: %@", vertLog); + displayProgram = nil; + NSAssert(NO, @"Filter shader link failed"); + } + } + + displayPositionAttribute = [displayProgram attributeIndex:@"position"]; + displayTextureCoordinateAttribute = [displayProgram attributeIndex:@"inputTextureCoordinate"]; + displayInputTextureUniform = [displayProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputTexture" for the fragment shader + + [GPUImageContext setActiveShaderProgram:displayProgram]; + glEnableVertexAttribArray(displayPositionAttribute); + glEnableVertexAttribArray(displayTextureCoordinateAttribute); + + [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0]; + _fillMode = kGPUImageFillModePreserveAspectRatio; + [self createDisplayFramebuffer]; + }); + + [self addObserver:self forKeyPath:@"frame" options:0 context:NULL]; +} + +- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context +{ + if (object == self && [keyPath isEqualToString:@"frame"] && (!CGSizeEqualToSize(self.bounds.size, CGSizeZero))) + { + runSynchronouslyOnVideoProcessingQueue(^{ + [self destroyDisplayFramebuffer]; + [self createDisplayFramebuffer]; + [self recalculateViewGeometry]; + }); + } +} + +- (void)dealloc +{ + [self removeObserver:self forKeyPath:@"frame"]; + + runSynchronouslyOnVideoProcessingQueue(^{ + [self destroyDisplayFramebuffer]; + }); +} + +#pragma mark - +#pragma mark Managing the display FBOs + +- (void)createDisplayFramebuffer; +{ + [GPUImageContext useImageProcessingContext]; + + glGenFramebuffers(1, &displayFramebuffer); + glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer); + + glGenRenderbuffers(1, &displayRenderbuffer); + glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer); + + [[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer]; + + GLint backingWidth, backingHeight; + + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth); + glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight); + + if ( (backingWidth == 0) || (backingHeight == 0) ) + { + [self destroyDisplayFramebuffer]; + return; + } + + _sizeInPixels.width = (CGFloat)backingWidth; + _sizeInPixels.height = (CGFloat)backingHeight; + +// NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight); + + glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer); + + GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER); + NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.bounds.size.width, self.bounds.size.height); +} + +- 
(void)destroyDisplayFramebuffer; +{ + [GPUImageContext useImageProcessingContext]; + + if (displayFramebuffer) + { + glDeleteFramebuffers(1, &displayFramebuffer); + displayFramebuffer = 0; + } + + if (displayRenderbuffer) + { + glDeleteRenderbuffers(1, &displayRenderbuffer); + displayRenderbuffer = 0; + } +} + +- (void)setDisplayFramebuffer; +{ + if (!displayFramebuffer) + { + [self createDisplayFramebuffer]; + } + + glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer); + + glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height); +} + +- (void)presentFramebuffer; +{ + glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer); + [[GPUImageContext sharedImageProcessingContext] presentBufferForDisplay]; +} + +#pragma mark - +#pragma mark Handling fill mode + +- (void)recalculateViewGeometry; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + CGFloat heightScaling, widthScaling; + + CGSize currentViewSize = self.bounds.size; + + // CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height; + // CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height; + + CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds); + + switch(_fillMode) + { + case kGPUImageFillModeStretch: + { + widthScaling = 1.0; + heightScaling = 1.0; + }; break; + case kGPUImageFillModePreserveAspectRatio: + { + widthScaling = insetRect.size.width / currentViewSize.width; + heightScaling = insetRect.size.height / currentViewSize.height; + }; break; + case kGPUImageFillModePreserveAspectRatioAndFill: + { + // CGFloat widthHolder = insetRect.size.width / currentViewSize.width; + widthScaling = currentViewSize.height / insetRect.size.height; + heightScaling = currentViewSize.width / insetRect.size.width; + }; break; + } + + imageVertices[0] = -widthScaling; + imageVertices[1] = -heightScaling; + imageVertices[2] = widthScaling; + imageVertices[3] = -heightScaling; + imageVertices[4] = -widthScaling; + imageVertices[5] = heightScaling; + imageVertices[6] = widthScaling; + imageVertices[7] = heightScaling; + }); + +// static const GLfloat imageVertices[] = { +// -1.0f, -1.0f, +// 1.0f, -1.0f, +// -1.0f, 1.0f, +// 1.0f, 1.0f, +// }; +} + +- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent; +{ + backgroundColorRed = redComponent; + backgroundColorGreen = greenComponent; + backgroundColorBlue = blueComponent; + backgroundColorAlpha = alphaComponent; +} + ++ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode; +{ +// static const GLfloat noRotationTextureCoordinates[] = { +// 0.0f, 0.0f, +// 1.0f, 0.0f, +// 0.0f, 1.0f, +// 1.0f, 1.0f, +// }; + + static const GLfloat noRotationTextureCoordinates[] = { + 0.0f, 1.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 1.0f, 0.0f, + }; + + static const GLfloat rotateRightTextureCoordinates[] = { + 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotateLeftTextureCoordinates[] = { + 0.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 1.0f, 1.0f, + }; + + static const GLfloat verticalFlipTextureCoordinates[] = { + 0.0f, 0.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 1.0f, 1.0f, + }; + + static const GLfloat horizontalFlipTextureCoordinates[] = { + 1.0f, 1.0f, + 0.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = { + 1.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 0.0f, + 0.0f, 1.0f, + }; + + static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = { 
+ 1.0f, 1.0f, + 1.0f, 0.0f, + 0.0f, 1.0f, + 0.0f, 0.0f, + }; + + static const GLfloat rotate180TextureCoordinates[] = { + 1.0f, 0.0f, + 0.0f, 0.0f, + 1.0f, 1.0f, + 0.0f, 1.0f, + }; + + switch(rotationMode) + { + case kGPUImageNoRotation: return noRotationTextureCoordinates; + case kGPUImageRotateLeft: return rotateLeftTextureCoordinates; + case kGPUImageRotateRight: return rotateRightTextureCoordinates; + case kGPUImageFlipVertical: return verticalFlipTextureCoordinates; + case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates; + case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates; + case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates; + case kGPUImageRotate180: return rotate180TextureCoordinates; + } +} + +#pragma mark - +#pragma mark GPUInput protocol + +- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + [GPUImageContext setActiveShaderProgram:displayProgram]; + [self setDisplayFramebuffer]; + + glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); + + glActiveTexture(GL_TEXTURE4); + glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay); + glUniform1i(displayInputTextureUniform, 4); + + glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices); + glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageView textureCoordinatesForRotation:inputRotation]); + + glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); + + [self presentFramebuffer]; + }); +} + +- (NSInteger)nextAvailableTextureIndex; +{ + return 0; +} + +- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex; +{ + inputTextureForDisplay = newInputTexture; +} + +- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex; +{ + inputRotation = newInputRotation; +} + +- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex; +{ + runSynchronouslyOnVideoProcessingQueue(^{ + CGSize rotatedSize = newSize; + + if (GPUImageRotationSwapsWidthAndHeight(inputRotation)) + { + rotatedSize.width = newSize.height; + rotatedSize.height = newSize.width; + } + + if (!CGSizeEqualToSize(inputImageSize, rotatedSize)) + { + inputImageSize = rotatedSize; + [self recalculateViewGeometry]; + } + }); +} + +- (CGSize)maximumOutputSize; +{ + if ([self respondsToSelector:@selector(setContentScaleFactor:)]) + { + CGSize pointSize = self.bounds.size; + return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height); + } + else + { + return self.bounds.size; + } +} + +- (void)endProcessing +{ +} + +- (BOOL)shouldIgnoreUpdatesToThisTarget; +{ + return NO; +} + +- (void)setTextureDelegate:(id)newTextureDelegate atIndex:(NSInteger)textureIndex; +{ + textureDelegate = newTextureDelegate; +} + +- (void)conserveMemoryForNextFrame; +{ + +} + +- (BOOL)wantsMonochromeInput; +{ + return NO; +} + +- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue; +{ + +} + +#pragma mark - +#pragma mark Accessors + +- (CGSize)sizeInPixels; +{ + if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero)) + { + return [self maximumOutputSize]; + } + else + { + return _sizeInPixels; + } +} + +- (void)setFillMode:(GPUImageFillModeType)newValue; +{ + _fillMode = newValue; + [self recalculateViewGeometry]; +} + +@end
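
For reference, a minimal sketch of how the classes added in this diff are typically chained together on iOS: a still image pushed through a filter and into an on-screen view. The view-controller context and the use of GPUImageSepiaFilter are illustrative assumptions, not part of this change.

    #import "GPUImagePicture.h"
    #import "GPUImageView.h"
    #import "GPUImageSepiaFilter.h" // assumed to be available elsewhere in the framework

    // Inside a hypothetical view controller's viewDidLoad:
    GPUImageView *filteredView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
    filteredView.fillMode = kGPUImageFillModePreserveAspectRatio;
    [self.view addSubview:filteredView];

    // In real code the picture would be kept in an ivar so that ARC does not
    // release it while the asynchronous processing is still in flight.
    GPUImagePicture *sourcePicture = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"sample.jpg"]];
    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];

    // GPUImagePicture inherits addTarget: from GPUImageOutput; the view acts as
    // the final GPUImageInput in the chain.
    [sourcePicture addTarget:sepiaFilter];
    [sepiaFilter addTarget:filteredView];
    [sourcePicture processImage]; // renders through the chain and displays the result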