diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..1d6910b
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at zee@zincma.de. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..de2739b
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1 @@
+Contributors transfer all ownership rights to Zinc Collective LLC upon submission of their contributions into the primary Zinc Collective LLC repository. Contributors guarantee that they have the right to transfer ownership of their contributions, and shall indemnify Zinc Collective LLC from any and all repercussions caused by a violation of this guarantee.
\ No newline at end of file
diff --git a/CrossProcess.xcodeproj/project.pbxproj b/CrossProcess.xcodeproj/project.pbxproj
index 7e87600..1fdde97
--- a/CrossProcess.xcodeproj/project.pbxproj
+++ b/CrossProcess.xcodeproj/project.pbxproj
@@ -513,7 +513,7 @@
     attributes = {
       CLASSPREFIX = CP;
       LastUpgradeCheck = 0720;
-      ORGANIZATIONNAME = "Copyright Banana Camera Company 2010 - 2012";
+      ORGANIZATIONNAME = "Zinc Collective LLC";
       TargetAttributes = {
         F79DB6031438F017004E4CA0 = {
           DevelopmentTeam = G8R2KJ59DE;
diff --git a/CrossProcess/BCAppDelegate.h b/CrossProcess/BCAppDelegate.h
index 32ea672..4f3e216 100644
--- a/CrossProcess/BCAppDelegate.h
+++ b/CrossProcess/BCAppDelegate.h
@@ -2,7 +2,7 @@
 // BCAppDelegate.h
 // CrossProcess
 //
-// Copyright 2012 Banana Camera Company. All rights reserved.
+// Copyright 2019 Zinc Collective LLC. All rights reserved.
 //

 #import
diff --git a/CrossProcess/BCGrowlView.h b/CrossProcess/BCGrowlView.h
index e290d85..6afb724 100644
--- a/CrossProcess/BCGrowlView.h
+++ b/CrossProcess/BCGrowlView.h
@@ -2,7 +2,7 @@
 // BCGrowlView.h
 // CrossProcess
 //
-// Copyright 2010-2013 Banana Camera Company. All rights reserved.
+// Copyright 2019 Zinc Collective LLC. All rights reserved.
 //

 #import
@@ -12,7 +12,7 @@
 @property(nonatomic, assign) NSTimeInterval notificationDuration;
 @property(nonatomic, strong) UILabel* textLabel;

-- (void) beginNotificationInViewController: (UIViewController*) vc 
+- (void) beginNotificationInViewController: (UIViewController*) vc
            withNotification: (NSString*) notification;

 @end
diff --git a/CrossProcess/BCGrowlView.m b/CrossProcess/BCGrowlView.m
index b9705cf..cf9e732 100644
--- a/CrossProcess/BCGrowlView.m
+++ b/CrossProcess/BCGrowlView.m
@@ -2,7 +2,7 @@
 // BCGrowlView.m
 // CrossProcess
 //
-// Copyright 2010-2013 Banana Camera Company. All rights reserved.
+// Copyright 2019 Zinc Collective LLC. All rights reserved.
// #import "BCGrowlView.h" @@ -17,57 +17,57 @@ @implementation BCGrowlView @synthesize notificationDuration = _notificationDuration; @synthesize textLabel = _textLabel; -- (id) initWithFrame: (CGRect) frame +- (id) initWithFrame: (CGRect) frame { if(self = [super initWithFrame:frame]) { self.notificationDuration = 2.5; - + // set up a rounded border CALayer* layer = [self layer]; - + // clear the view's background color so that our background // fits within the rounded border self.backgroundColor = [UIColor clearColor]; layer.backgroundColor = [UIColor grayColor].CGColor; - + layer.borderWidth = 0.0f; layer.cornerRadius = 12.0f; - + self.textLabel = [[UILabel alloc] initWithFrame: self.layer.frame]; self.textLabel.backgroundColor = [UIColor clearColor]; self.textLabel.textColor = [UIColor whiteColor]; self.textLabel.font = [UIFont systemFontOfSize: 18.0]; self.textLabel.textAlignment = NSTextAlignmentCenter; - + [self addSubview: self.textLabel]; } - + return self; } -- (void) beginNotificationInViewController: (UIViewController*) vc +- (void) beginNotificationInViewController: (UIViewController*) vc withNotification: (NSString*) notification { self.textLabel.text = notification; [self.textLabel sizeToFit]; self.textLabel.frame = CenterRectOverRect(self.textLabel.frame, self.frame); [self.textLabel setNeedsDisplay]; - + self.alpha = 0.0; self.frame = CenterRectOverRect(self.frame, vc.view.frame); [vc.view addSubview: self]; - - [UIView animateWithDuration:kRevealAnimationDuration + + [UIView animateWithDuration:kRevealAnimationDuration animations:^() { self.alpha = 0.8; } completion:^(BOOL finished) { - [UIView animateWithDuration: kDismissAnimationDuration - delay: self.notificationDuration - options: 0 + [UIView animateWithDuration: kDismissAnimationDuration + delay: self.notificationDuration + options: 0 animations:^() { self.alpha = 0.0; diff --git a/CrossProcess/BCImage.h b/CrossProcess/BCImage.h index 90ad203..e7ad075 100644 --- a/CrossProcess/BCImage.h +++ b/CrossProcess/BCImage.h @@ -2,7 +2,7 @@ // BCImage.h // Baboon // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -19,10 +19,10 @@ uint32_t* _rawBytes; // Raw pixel buffer uint32_t _bufferSize; // Pixel buffer size in bytes uint32_t _rowBytes; // Number of bytes per row (16-byte aligned) - + CGSize _size; // Geometric size UIImageOrientation _orientation; // Orientation - + unsigned char _reds[256]; unsigned char _greens[256]; unsigned char _blues[256]; @@ -63,7 +63,7 @@ #define SET_RED_COMPONENT_RGBA(pixel, value) *pixel = (*pixel & 0xFFFFFF00) | (value << 0) // ARGB use this if CGImageAlphaInfo == kCGImageAlphaPremultipliedFirst - + #define BLUE_COMPONENT_ARGB(pixel) (unsigned char)(*pixel >> 24) #define GREEN_COMPONENT_ARGB(pixel) (unsigned char)(*pixel >> 16) #define RED_COMPONENT_ARGB(pixel) (unsigned char)(*pixel >> 8) diff --git a/CrossProcess/BCImage.m b/CrossProcess/BCImage.m index 2bc5694..f43ba0e 100644 --- a/CrossProcess/BCImage.m +++ b/CrossProcess/BCImage.m @@ -2,7 +2,7 @@ // BCImage.mm // Baboon // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "BCImage.h" @@ -26,24 +26,24 @@ @implementation BCImage + (CGColorSpaceRef) deviceRGBColorSpace { static CGColorSpaceRef sColorspace = nil; - + if(!sColorspace) { sColorspace = CGColorSpaceCreateDeviceRGB(); } - + return sColorspace; } + (CGColorRef) genericGrayColor80 { static CGColorRef sGenericGrayColor80; - + if(!sGenericGrayColor80) { sGenericGrayColor80 = CreateDeviceGrayColor(0.8, 0.2); } - + return sGenericGrayColor80; } @@ -52,22 +52,22 @@ + (BCImage*) imageWithUIImage: (UIImage*) image scale: (CGFloat) scale crop: (CG { CGSize sourceSize = image.size; CGSize destinationSize = CGSizeZero; - + destinationSize.width = roundf(sourceSize.width * scale); destinationSize.height = roundf(sourceSize.height * scale); - + crop.size.width = roundf(crop.size.width * scale); crop.size.height = roundf(crop.size.height * scale); - + BCImage* resultImage = [[BCImage alloc] initWithSize: destinationSize orientation: image.imageOrientation]; - + if(resultImage) { CGAffineTransform tf = CGAffineTransformMakeScale(scale, scale); - + [resultImage pushContext]; - + CGContextSetInterpolationQuality(resultImage.context, kCGInterpolationHigh); CGContextConcatCTM(resultImage.context, tf); CGContextConcatCTM(resultImage.context, AdjustedTransform(image.imageOrientation, sourceSize.width, sourceSize.height)); @@ -78,7 +78,7 @@ + (BCImage*) imageWithUIImage: (UIImage*) image scale: (CGFloat) scale crop: (CG [resultImage popContext]; } - + return resultImage; } */ @@ -89,20 +89,20 @@ - (id) initWithSize: (CGSize) imageSize scale: (CGFloat) scale orientation: (UII { _size.width = roundf(imageSize.width * scale); _size.height = roundf(imageSize.height * scale); - + CGColorSpaceRef colorSpace = [[self class] deviceRGBColorSpace]; - + _rowBytes = _size.width * 4; _rowBytes = (_rowBytes + 15) & ~15; - + CGBitmapInfo bitmapInfo = (CGBitmapInfo) kCGImageAlphaPremultipliedLast; - + _bufferSize = _rowBytes * _size.height; _rawBytes = (uint32_t*)calloc(sizeof(unsigned char), _bufferSize); _contextRef = CGBitmapContextCreate(_rawBytes, _size.width, _size.height, 8, _rowBytes, colorSpace, bitmapInfo); _orientation = orientation; } - + return self; } @@ -110,7 +110,7 @@ - (void) dealloc { CGContextRelease(_contextRef); _contextRef = NULL; - + if(_rawBytes) { free(_rawBytes); @@ -125,7 +125,7 @@ - (CGContextRef) pushContext UIGraphicsPushContext(_contextRef); CGContextSaveGState(_contextRef); } - + return _contextRef; } @@ -146,7 +146,7 @@ - (CGImageRef) CGImageRef { result = CGBitmapContextCreateImage(_contextRef); } - + return result; } @@ -156,7 +156,7 @@ - (NSUInteger) pCurveApplyMask: (NSArray*) curves BCImageCurve* redCurve = [curves objectAtIndex: 1]; BCImageCurve* greenCurve = [curves objectAtIndex: 2]; BCImageCurve* blueCurve = [curves objectAtIndex: 3]; - + return ((rgbCurve.identity ? 0 : CURVE_COLORS) | (redCurve.identity ? 0 : CURVE_RED) | (greenCurve.identity ? 
0 : CURVE_GREEN) | @@ -166,14 +166,14 @@ - (NSUInteger) pCurveApplyMask: (NSArray*) curves - (void) applyCurves: (NSArray*) curves { NSUInteger curvesMask = [self pCurveApplyMask: curves]; - + if(curvesMask != CURVE_NONE) { BCImageCurve* rgbCurve = [curves objectAtIndex: 0]; BCImageCurve* redCurve = [curves objectAtIndex: 1]; BCImageCurve* greenCurve = [curves objectAtIndex: 2]; BCImageCurve* blueCurve = [curves objectAtIndex: 3]; - + for(unsigned int i = 0; i <= 255; ++i) { switch(curvesMask) @@ -222,10 +222,10 @@ - (void) applyCurves: (NSArray*) curves } } } - + uint32_t* currentPixel = _rawBytes; uint32_t* lastPixel = (uint32_t*)((unsigned char*)_rawBytes + _bufferSize); - + while(currentPixel < lastPixel) { SET_RED_COMPONENT_RGBA(currentPixel, _reds[RED_COMPONENT_RGBA(currentPixel)]); @@ -241,7 +241,7 @@ - (void) applyLevels: (BCImageLevels*) levels uint32_t* currentPixel = _rawBytes; uint32_t* lastPixel = (uint32_t*)((unsigned char*)_rawBytes + _bufferSize); uint32_t* imageLevels = levels.imageLevels; - + while(currentPixel < lastPixel) { SET_RED_COMPONENT_RGBA(currentPixel, imageLevels[RED_COMPONENT_RGBA(currentPixel)]); diff --git a/CrossProcess/BCImageCaptureController.h b/CrossProcess/BCImageCaptureController.h index 2bd1931..69babad 100644 --- a/CrossProcess/BCImageCaptureController.h +++ b/CrossProcess/BCImageCaptureController.h @@ -2,7 +2,7 @@ // BCImageCaptureController.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import diff --git a/CrossProcess/BCImageCaptureController.m b/CrossProcess/BCImageCaptureController.m index c99d8e9..3fa30a1 100644 --- a/CrossProcess/BCImageCaptureController.m +++ b/CrossProcess/BCImageCaptureController.m @@ -2,7 +2,7 @@ // BCImageCaptureController.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "BCImageCaptureController.h" @@ -19,12 +19,12 @@ @implementation BCImageCaptureController - (id) initWithNibName: (NSString*) nibNameOrNil bundle: (NSBundle*) nibBundleOrNil { self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil]; - if (self) + if (self) { self.imagePickerController = [[UIImagePickerController alloc] init]; self.imagePickerController.delegate = self; } - + return self; } @@ -38,7 +38,7 @@ - (void)didReceiveMemoryWarning - (void) viewDidLoad { [super viewDidLoad]; - + self.view.backgroundColor = [UIColor clearColor]; self.view.userInteractionEnabled = NO; } @@ -57,7 +57,7 @@ - (BOOL) shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfac - (void) setupForImageCapture: (UIImagePickerControllerSourceType) sourceType { self.imagePickerController.sourceType = sourceType; - + if(sourceType == UIImagePickerControllerSourceTypeCamera) { // user wants to use the camera interface @@ -69,12 +69,12 @@ - (void) setupForImageCapture: (UIImagePickerControllerSourceType) sourceType self.imagePickerController.cameraFlashMode = UIImagePickerControllerCameraFlashModeAuto; NSUserDefaults* defaults = [NSUserDefaults standardUserDefaults]; - + if([defaults objectForKey: BCCameraDeviceKey]) { self.imagePickerController.cameraDevice = [defaults integerForKey: BCCameraDeviceKey]; } - + if([defaults objectForKey: BCCameraFlashModeKey]) { self.imagePickerController.cameraFlashMode = [defaults integerForKey: BCCameraFlashModeKey]; @@ -87,13 +87,13 @@ - (void) setupForImageCapture: (UIImagePickerControllerSourceType) sourceType - (void) imagePickerController: (UIImagePickerController*) picker didFinishPickingMediaWithInfo: (NSDictionary*) info { UIImage* image = [info valueForKey: UIImagePickerControllerOriginalImage]; - + if(picker.sourceType == UIImagePickerControllerSourceTypeCamera) { NSUserDefaults* defaults = [NSUserDefaults standardUserDefaults]; [defaults setInteger: picker.cameraDevice forKey: BCCameraDeviceKey]; [defaults setInteger: picker.cameraFlashMode forKey: BCCameraFlashModeKey]; - + NSDictionary* imageMetadata = [info objectForKey: UIImagePickerControllerMediaMetadata]; [self.delegate userCapturedPhoto: image withMetadata: imageMetadata]; } diff --git a/CrossProcess/BCImageCurve.h b/CrossProcess/BCImageCurve.h index 75484ee..f4313c6 100644 --- a/CrossProcess/BCImageCurve.h +++ b/CrossProcess/BCImageCurve.h @@ -2,7 +2,7 @@ // BCImageCurve.h // Baboon // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -26,7 +26,7 @@ enum double* _samples; BOOL _identity; UIColor* _curveColor; - + uint _pixels[256]; // 1K buffer BOOL _preprocessed; } diff --git a/CrossProcess/BCImageCurve.m b/CrossProcess/BCImageCurve.m index 1a54287..9c71324 100644 --- a/CrossProcess/BCImageCurve.m +++ b/CrossProcess/BCImageCurve.m @@ -2,7 +2,7 @@ // BCImageCurve.mm // Baboon // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "BCImageCurve.h" @@ -21,9 +21,9 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path BCImageCurve* greenCurve = [[BCImageCurve alloc] init]; BCImageCurve* blueCurve = [[BCImageCurve alloc] init]; BCImageCurve* alphaCurve = [[BCImageCurve alloc] init]; - + NSData* curveFile = [NSData dataWithContentsOfFile: path]; - + if(curveFile) { const short* data = (const short*)[curveFile bytes]; @@ -31,7 +31,7 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path ++data; short second = OSSwapConstInt16(*data); ++data; - + if(first == 0x04 && second == 0x05) { short numPoints; @@ -39,22 +39,22 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path double testY; // ACV file always stores RGB, Red, Green, Blue, and Alpha curves - + // colors - + numPoints = OSSwapConstInt16(*data); ++data; - + for(short i = 0; i < numPoints; ++i) { short y = OSSwapConstInt16(*data); ++data; short x = OSSwapConstInt16(*data); ++data; - + [rgbCurve setPoint: i xValue: (double)x / 255.0 yValue: y / 255.0 recalculate: YES]; } - + if(numPoints == 2) { [rgbCurve getPoint: 0 xValue: &testX yValue: &testY]; @@ -66,21 +66,21 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path [rgbCurve resetToIdentity]; } } - + } // red - + numPoints = OSSwapConstInt16(*data); ++data; - + for(short i = 0; i < numPoints; ++i) { short y = OSSwapConstInt16(*data); ++data; short x = OSSwapConstInt16(*data); ++data; - + [redCurve setPoint: i xValue: (double)x / 255.0 yValue: y / 255.0 recalculate: YES]; } @@ -95,21 +95,21 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path [redCurve resetToIdentity]; } } - + } - + // green - + numPoints = OSSwapConstInt16(*data); ++data; - + for(short i = 0; i < numPoints; ++i) { short y = OSSwapConstInt16(*data); ++data; short x = OSSwapConstInt16(*data); ++data; - + [greenCurve setPoint: i xValue: (double)x / 255.0 yValue: y / 255.0 recalculate: YES]; } @@ -124,21 +124,21 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path [greenCurve resetToIdentity]; } } - + } - + // blue - + numPoints = OSSwapConstInt16(*data); ++data; - + for(short i = 0; i < numPoints; ++i) { short y = OSSwapConstInt16(*data); ++data; short x = OSSwapConstInt16(*data); ++data; - + [blueCurve setPoint: i xValue: (double)x / 255.0 yValue: y / 255.0 recalculate: YES]; } @@ -153,37 +153,37 @@ + (NSArray*) imageCurvesFromACV: (NSString*) path [blueCurve resetToIdentity]; } } - + } - + // alpha numPoints = OSSwapConstInt16(*data); ++data; - + for(short i = 0; i < numPoints; ++i) { short y = OSSwapConstInt16(*data); ++data; short x = OSSwapConstInt16(*data); ++data; - + [alphaCurve setPoint: i xValue: (double)x / 255.0 yValue: y / 255.0 recalculate: YES]; } - + // for now always make it an identity curve [alphaCurve resetToIdentity]; } } - - + + NSArray* curves = [NSArray arrayWithObjects: rgbCurve, redCurve, greenCurve, blueCurve, alphaCurve, nil]; - + [rgbCurve preprocessCurve]; [redCurve preprocessCurve]; [greenCurve preprocessCurve]; [blueCurve preprocessCurve]; [alphaCurve preprocessCurve]; - + return curves; } @@ -197,7 +197,7 @@ - (id) init [self setNumPoints: 17]; [self setNumSamples: 256]; } - + return self; } @@ -228,16 +228,16 @@ - (void) setNumPoints: (int) numPoints _numPoints = numPoints; _points = (CGPoint*)malloc(sizeof(CGPoint) * _numPoints); - + _points[0].x = 0.0; _points[0].y = 0.0; - + for(int i = 1; i < _numPoints - 1; i++) { _points[i].x = -1.0; _points[i].y = -1.0; } - + _points[_numPoints - 1].x = 1.0; _points[_numPoints - 1].y = 1.0; _identity = YES; @@ -253,13 +253,13 @@ - (void) setNumSamples: (int) numSamples 
free((void*)_samples); _samples = NULL; } - + _numSamples = numSamples; _samples = (double*)malloc(sizeof(double) * _numSamples); - + for(int i = 0; i < _numSamples; i++) _samples[i] = (double) i / (double) (_numSamples - 1); - + _identity = YES; } } @@ -268,10 +268,10 @@ - (void) resetToIdentity { _numPoints = 0; [self setNumPoints: 17]; - + _numSamples = 0; [self setNumSamples: 256]; - + // _identity gets set from methods above. } @@ -279,7 +279,7 @@ - (int) getClosestPoint: (double) x { int closestPoint = 0; double distance = DBL_MAX; - + for(int i = 0; i < _numPoints; ++i) { if(_points[i].x >= 0.0 && fabs(x - _points[i].x) < distance) @@ -293,7 +293,7 @@ - (int) getClosestPoint: (double) x { closestPoint = ROUND(x * (double)(_numPoints - 1)); } - + return closestPoint; } @@ -305,7 +305,7 @@ - (void) setPoint: (int) point xValue: (double) x yValue: (double) y recalculate { _points[point].x = x; _points[point].y = y; - + if(flag) { [self dirty]; @@ -336,7 +336,7 @@ - (void) getPoint: (int) point xValue: (double*) outX yValue: (double*) outY { *outX = _points[point].x; } - + if(outY) { *outY = _points[point].y; @@ -357,7 +357,7 @@ - (void) calculate { int* points = (int*)malloc(sizeof(int) * _numPoints); int numPoints = 0; - + // cycle through the curves for(int i = 0; i < _numPoints; i++) { @@ -370,24 +370,24 @@ - (void) calculate break; } } - + if(numPoints > 0) { CGPoint point; int boundary; - + // initialize boundary curve points point = _points[points[0]]; boundary = ROUND(point.x * (double)(_numSamples - 1)); - + for(int i = 0; i < boundary; i++) { _samples[i] = point.y; } - + point = _points[points[numPoints - 1]]; boundary = ROUND(point.x * (double) (_numSamples - 1)); - + for(int i = boundary; i < _numSamples; i++) { _samples[i] = point.y; @@ -399,20 +399,20 @@ - (void) calculate int p2 = points[i]; int p3 = points[i + 1]; int p4 = points[MIN(i + 2, numPoints - 1)]; - + [self plotCurveP1: p1 p2: p2 p3: p3 p4: p4]; } - + // ensure that the control points are used exactly for(int i = 0; i < numPoints; i++) { double x = _points[points[i]].x; double y = _points[points[i]].y; - + _samples[ROUND (x * (double)(_numSamples - 1))] = y; } } - + free((void*)points); } @@ -422,7 +422,7 @@ - (void) plotCurveP1: (int) p1 p2: (int) p2 p3: (int) p3 p4: (int) p4 double y0, y1, y2, y3; double dx, dy; double slope; - + x0 = _points[p2].x; y0 = _points[p2].y; x3 = _points[p3].x; @@ -430,7 +430,7 @@ - (void) plotCurveP1: (int) p1 p2: (int) p2 p3: (int) p3 p4: (int) p4 dx = x3 - x0; dy = y3 - y0; - + if(dx > 0) { if(p1 == p2 && p3 == p4) @@ -441,38 +441,38 @@ - (void) plotCurveP1: (int) p1 p2: (int) p2 p3: (int) p3 p4: (int) p4 else if (p1 == p2 && p3 != p4) { slope = (_points[p4].y - y0) / (_points[p4].x - x0); - + y2 = y3 - slope * dx / 3.0; y1 = y0 + (y2 - y0) / 2.0; } else if (p1 != p2 && p3 == p4) { slope = (y3 - _points[p1].y) / (x3 - _points[p1].x); - + y1 = y0 + slope * dx / 3.0; y2 = y3 + (y1 - y3) / 2.0; } else { slope = (y3 - _points[p1].y) / (x3 - _points[p1].x); - + y1 = y0 + slope * dx / 3.0; - + slope = (_points[p4].y - y0) / (_points[p4].x - x0); - + y2 = y3 - slope * dx / 3.0; } - + for(int i = 0; i <= ROUND(dx * (double)(_numSamples - 1)); i++) { double y, t; int index; - + t = i / dx / (double) (_numSamples - 1); y = y0 * (1-t) * (1-t) * (1-t) + 3 * y1 * (1-t) * (1-t) * t + 3 * y2 * (1-t) * t * t + y3 * t * t * t; - + index = i + ROUND(x0 * (double)(_numSamples - 1)); - + if(index < _numSamples) { _samples[index] = CLAMP(y, 0.0, 1.0); @@ -491,7 +491,7 @@ - (void) dirty - (double) 
mapValue: (double) value { double result = value; - + if(_identity == NO) { if(value < 0.0) @@ -505,15 +505,15 @@ - (double) mapValue: (double) value else { value = value * (_numSamples - 1); - + int index = (int) value; - + double f = value - index; - + result = (1.0 - f) * _samples[index] + f * _samples[index + 1]; } } - + return result; } @@ -536,7 +536,7 @@ - (void) preprocessCurve double newValue = [self mapValue: (double)i / 255.0]; _pixels[i] = (uint)((newValue * 255.0f) + 0.5); } - + _preprocessed = YES; } diff --git a/CrossProcess/BCImageLevels.h b/CrossProcess/BCImageLevels.h index f7f12f7..3ed31ec 100644 --- a/CrossProcess/BCImageLevels.h +++ b/CrossProcess/BCImageLevels.h @@ -2,7 +2,7 @@ // BCImageLevels.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import diff --git a/CrossProcess/BCImageLevels.m b/CrossProcess/BCImageLevels.m index c1fc565..7df3480 100644 --- a/CrossProcess/BCImageLevels.m +++ b/CrossProcess/BCImageLevels.m @@ -2,7 +2,7 @@ // BCImageLevels.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "BCImageLevels.h" @@ -23,15 +23,15 @@ - (id) initWithShadowLevel: (NSUInteger) shadowLevel hilightLevel: (NSUInteger) { if(hilightLevel > 255) { - hilightLevel = 255; + hilightLevel = 255; } - + _shadowLevel = shadowLevel; _hilightLevel = hilightLevel; _imageLevels = (uint32_t*)malloc(sizeof(uint32_t) * 256); [self pGenerateLevels]; } - + return self; } @@ -55,9 +55,9 @@ - (void) pGenerateLevels NSUInteger max_b = self.hilightLevel; NSUInteger a_span = max_a - min_a; NSUInteger b_span = max_b - min_b; - + double scaleFactor = (double) b_span / (double) a_span; - + for(NSUInteger i = 0; i < 256; ++i) { _imageLevels[i] = min_b + (i - min_a) * scaleFactor; diff --git a/CrossProcess/BCImageScrollView.h b/CrossProcess/BCImageScrollView.h index ad85751..f4b9222 100644 --- a/CrossProcess/BCImageScrollView.h +++ b/CrossProcess/BCImageScrollView.h @@ -2,7 +2,7 @@ // BCImageScrollView.h // CrossProcess // -// Copyright 2010-2011 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import diff --git a/CrossProcess/BCImageScrollView.m b/CrossProcess/BCImageScrollView.m index b6fe435..2a205d7 100644 --- a/CrossProcess/BCImageScrollView.m +++ b/CrossProcess/BCImageScrollView.m @@ -2,7 +2,7 @@ // BCImageScrollView.m // CrossProcess // -// Copyright 2010-2011 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "BCImageScrollView.h" @@ -30,13 +30,13 @@ - (id) initWithCoder: (NSCoder*) decoder self.clipsToBounds = YES; self.scrollEnabled = YES; self.showsHorizontalScrollIndicator = NO; - self.showsVerticalScrollIndicator = NO; + self.showsVerticalScrollIndicator = NO; self.decelerationRate = UIScrollViewDecelerationRateFast; //self.delegate = self; - + _imageList = [[NSMutableArray alloc] initWithCapacity: 10]; } - + return self; } @@ -50,7 +50,7 @@ - (void) setImage: (BCImage*) image forIndex: (NSUInteger) index { assert(index < self.subviews.count); assert(image); - + BCImageView* view = [self.subviews objectAtIndex: index]; if(view) { @@ -62,11 +62,11 @@ - (NSUInteger) addImage: (CPPlaceholderType) type portraitOrientation: (BOOL) is { BCImageView* view = [[BCImageView alloc] initWithPlaceholder: type portraitOrientation: isPortrait]; view.frame = CGRectOffset(BCViewFrame, -BCViewFrame.size.width, 0); - + // Always insert at beginning of list [self insertSubview: view atIndex: 0]; - - [UIView animateWithDuration: 2.0 + + [UIView animateWithDuration: 2.0 animations:^ { [self pLayoutImageViews]; @@ -97,13 +97,13 @@ - (BCImageView*) currentImage BCImageView* view = nil; CGPoint contentOffset = self.contentOffset; NSUInteger index = contentOffset.x / BCViewFrame.size.width; - + if(index < self.subviews.count) { view = [self.subviews objectAtIndex: index]; } - - return view; + + return view; } - (NSURL*) currentImageURL @@ -111,45 +111,45 @@ - (NSURL*) currentImageURL NSURL* url = nil; CGPoint contentOffset = self.contentOffset; NSUInteger index = contentOffset.x / BCViewFrame.size.width; - + if(index < _imageList.count) { url = [_imageList objectAtIndex: index]; } - - return url; + + return url; } - (void) scrollViewDidEndDragging: (UIScrollView*) scrollView willDecelerate: (BOOL) decelerate { - + } - (void) scrollViewDidEndDecelerating: (UIScrollView*) scrollView { - + } - (void) scrollViewDidScroll: (UIScrollView*) scrollView { - // Make sure offsets - + // Make sure offsets + CGPoint contentOffset = self.contentOffset; BCImageView* closestView = nil; CGFloat lastDistance = 0.0f; - + for(BCImageView* subview in self.subviews) { CGRect viewFrame = subview.frame; CGFloat distance = fabs(contentOffset.x - viewFrame.origin.x); - + if(!closestView || distance < lastDistance) { closestView = subview; lastDistance = distance; } } - + if(closestView) { contentOffset.x = closestView.frame.origin.x; @@ -162,17 +162,17 @@ - (void) pLayoutImageViews CGFloat curXPosition = 0.0f; CGFloat gutterWidth = 8.0f; CGFloat contentWidth = 0.0f; - + for(BCImageView* subview in self.subviews) { CGRect viewFrame = subview.frame; viewFrame.origin = CGPointMake(curXPosition, 0.0f); subview.frame = viewFrame; - + contentWidth += viewFrame.size.width + gutterWidth; curXPosition += viewFrame.size.width + gutterWidth; } - + if(contentWidth == 0.0f) { contentWidth = BCViewFrame.size.width; @@ -181,7 +181,7 @@ - (void) pLayoutImageViews { contentWidth -= gutterWidth; } - + self.contentSize = CGSizeMake(contentWidth, BCViewFrame.size.height); } diff --git a/CrossProcess/BCImageView.h b/CrossProcess/BCImageView.h index 54bddaa..e0ed0c2 100644 --- a/CrossProcess/BCImageView.h +++ b/CrossProcess/BCImageView.h @@ -2,7 +2,7 @@ // BCImageView.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import diff --git a/CrossProcess/BCImageView.m b/CrossProcess/BCImageView.m index 85db47d..db28af6 100644 --- a/CrossProcess/BCImageView.m +++ b/CrossProcess/BCImageView.m @@ -2,7 +2,7 @@ // BCImageView.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "BCImageView.h" @@ -32,7 +32,7 @@ - (id) initWithFrame: (CGRect) frame { self.layer.contentsGravity = kCAGravityResizeAspect; } - + return self; } @@ -44,7 +44,7 @@ - (id) initWithImage: (BCImage*) image self.layer.contentsGravity = kCAGravityResizeAspect; self.layer.contents = (id)image.CGImageRef; } - + return self; } @@ -53,31 +53,31 @@ - (id) initWithPlaceholder: (CPPlaceholderType) placeholderType portraitOrientat if(self = [super initWithFrame: BCViewFrame]) { _portraitOrientation = isPortrait; - + UIImage* placeholderImage = [self pLoadPlaceholderImage: placeholderType]; if(self.portraitOrientation == NO) { CGSize imageSize = placeholderImage.size; // 320 wide x 428 high - + UIGraphicsBeginImageContextWithOptions(CGSizeMake(imageSize.height, imageSize.width), YES, 0.0f); CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0f, imageSize.width); transform = CGAffineTransformRotate(transform, radians(-90)); CGContextConcatCTM(UIGraphicsGetCurrentContext(), transform); - + [placeholderImage drawAtPoint: CGPointZero]; placeholderImage = UIGraphicsGetImageFromCurrentImageContext(); - UIGraphicsEndImageContext(); + UIGraphicsEndImageContext(); } self.layer.contentsGravity = kCAGravityResizeAspect; self.layer.contents = (id)placeholderImage.CGImage; } - + return self; } - + - (id) initWithImageURL: (NSURL*) imageURL { if(self = [super initWithFrame: BCViewFrame]) @@ -85,7 +85,7 @@ - (id) initWithImageURL: (NSURL*) imageURL self.layer.contentsGravity = kCAGravityResizeAspect; [self pUseImageURL: imageURL]; } - + return self; } @@ -94,7 +94,7 @@ - (id) initWithImageURL: (NSURL*) imageURL - (void) drawRect: (CGRect) rect { CGContextRef context = UIGraphicsGetCurrentContext(); - + CGContextSetStrokeColorWithColor(context, [UIColor redColor].CGColor); CGContextStrokeRectWithWidth(context, rect, 4.0); } @@ -117,9 +117,9 @@ - (void) useAssetFrame: (NSString*) frame content: (NSString*) content CALayer* contentLayer = [CALayer layer]; contentLayer.contentsGravity = kCAGravityResizeAspect; contentLayer.bounds = self.layer.bounds; - contentLayer.position = self.layer.position; + contentLayer.position = self.layer.position; contentLayer.contents = (__bridge id)contentAsset.CGImage; - + [self.layer addSublayer: contentLayer]; [self.layer addSublayer: frameLayer]; } @@ -128,7 +128,7 @@ - (void) useAssetFrame: (NSString*) frame content: (NSString*) content - (void) useAsset: (id) asset { // Asset can be an a url, a uiimage, or a placeholder - + NSURL* assetURL = BCCastAsClass(NSURL, asset); UIImage* assetUIImage = BCCastAsClass(UIImage, asset); BCImage* assetImage = BCCastAsClass(BCImage, asset); @@ -140,10 +140,10 @@ - (void) useAsset: (id) asset CATransition* crossfade = [CATransition animation]; crossfade.type = kCATransitionFade; crossfade.duration = 2.0; - + [self.layer addAnimation: crossfade forKey: kCATransition]; } - + if(assetURL) { [self pUseImageURL: assetURL]; @@ -160,22 +160,22 @@ - (void) useAsset: (id) asset else if(assetPlaceholder) { NSInteger placeholderType = [assetPlaceholder integerValue]; - + _portraitOrientation = placeholderType >= 0; - + CPPlaceholderType type = (CPPlaceholderType)labs(placeholderType); 
UIImage* placeholderImage = [self pLoadPlaceholderImage: type]; UIImage* borderImage = nil; - + if((type & CPPlaceholderBorder) != 0) { borderImage = [self pLoadPlaceholderBorderImage]; } - + if(CGSizeEqualToSize(self.naturalSize, CGSizeZero)) { CGSize imageSize = placeholderImage.size; // 320 wide x 428 high - + if(self.portraitOrientation == NO) { UIGraphicsBeginImageContextWithOptions(CGSizeMake(imageSize.height, imageSize.width), YES, 0.0f); @@ -184,16 +184,16 @@ - (void) useAsset: (id) asset transform = CGAffineTransformRotate(transform, radians(-90)); CGContextConcatCTM(UIGraphicsGetCurrentContext(), transform); } - else + else { UIGraphicsBeginImageContextWithOptions(CGSizeMake(imageSize.width, imageSize.height), YES, 0.0f); } - + [placeholderImage drawAtPoint: CGPointZero]; [borderImage drawAtPoint: CGPointZero]; - + placeholderImage = UIGraphicsGetImageFromCurrentImageContext(); - UIGraphicsEndImageContext(); + UIGraphicsEndImageContext(); } else { @@ -202,12 +202,12 @@ - (void) useAsset: (id) asset UIGraphicsBeginImageContextWithOptions(CGSizeMake(placeholderSize.width, placeholderSize.height), YES, 0.0f); [placeholderImage drawInRect: CGRectMake(0.0, 0.0, placeholderSize.width, placeholderSize.height)]; [borderImage drawInRect: CGRectMake(0.0, 0.0, placeholderSize.width, placeholderSize.height)]; - + placeholderImage = UIGraphicsGetImageFromCurrentImageContext(); - UIGraphicsEndImageContext(); + UIGraphicsEndImageContext(); } - self.layer.contents = (__bridge id)placeholderImage.CGImage; + self.layer.contents = (__bridge id)placeholderImage.CGImage; } else { @@ -221,9 +221,9 @@ - (void) pUseImageURL: (NSURL*) imageURL { NSError* error = nil; UIImage* image = [UIImage imageWithData: [NSData dataWithContentsOfURL: imageURL - options: NSDataReadingMappedIfSafe + options: NSDataReadingMappedIfSafe error: &error]]; - + if(image) { [self pCrossFadeLayer]; @@ -238,15 +238,15 @@ - (void) pUseImageURL: (NSURL*) imageURL else { ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init]; // AppDelegate().assetLibrary; - - [library assetForURL: imageURL - resultBlock:^(ALAsset *asset) + + [library assetForURL: imageURL + resultBlock:^(ALAsset *asset) { assert([NSThread isMainThread]); - + ALAssetRepresentation* rep = [asset defaultRepresentation]; CGImageRef fullscreenImageRef = [rep fullScreenImage]; - + if(fullscreenImageRef) { [self pCrossFadeLayer]; @@ -257,7 +257,7 @@ - (void) pUseImageURL: (NSURL*) imageURL { NSLog(@"Failed to load asset"); } - } + } failureBlock:^(NSError *error) { NSLog(@"%@", [error description]); @@ -272,7 +272,7 @@ - (void) pCrossFadeLayer CATransition* crossfade = [CATransition animation]; crossfade.type = kCATransitionFade; crossfade.duration = 2.0; - + [self.layer addAnimation: crossfade forKey: kCATransition]; } } @@ -286,7 +286,7 @@ - (void) clearContent - (UIImage*) pLoadPlaceholderBorderImage { UIImage* image = [UIImage imageNamed: @"preview_border"]; - + if(!image) { #if DEBUG @@ -295,16 +295,16 @@ - (UIImage*) pLoadPlaceholderBorderImage image = [UIImage imageNamed: [@"preview_border" stringByAppendingPathExtension: @"png"]]; } assert(image); - + return image; } - (UIImage*) pLoadPlaceholderImage: (CPPlaceholderType) placeholderType { NSString* placeholderName = nil; - + NSUInteger type = placeholderType & ~CPPlaceholderBorder; - + switch(type) { case CPPlaceholderBasic: @@ -342,9 +342,9 @@ - (UIImage*) pLoadPlaceholderImage: (CPPlaceholderType) placeholderType break; } } - + UIImage* image = nil; - + if(placeholderName) { image = [UIImage imageNamed: 
placeholderName]; diff --git a/CrossProcess/BCMiscellaneous.h b/CrossProcess/BCMiscellaneous.h index 3fe77a0..2db8b79 100644 --- a/CrossProcess/BCMiscellaneous.h +++ b/CrossProcess/BCMiscellaneous.h @@ -2,7 +2,7 @@ // BCMiscellaneous.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -17,13 +17,13 @@ #ifdef __cplusplus extern "C" { #endif - + id BCDynamicCast(Class c, id src); id BCProtocolCast(Protocol* protocol, id src); CFTypeRef BCCFTypeCast(CFTypeID typeId, CFTypeRef src); NS_INLINE CGFloat radians(CGFloat degrees) { return (CGFloat)(degrees * M_PI / 180.0f); } - + #ifdef __cplusplus } #endif @@ -31,7 +31,7 @@ extern "C" { /* CGFloat scale = MIN (r.size.width / s.width, r.size.height / s.height); - s.width = trunc(s.width * scale); + s.width = trunc(s.width * scale); s.height = trunc(s.height * scale); r.origin.x += trunc((r.size.width - s.width) * .5); r.size.width = s.width; diff --git a/CrossProcess/BCMiscellaneous.m b/CrossProcess/BCMiscellaneous.m index 4513a5a..17c7ff9 100644 --- a/CrossProcess/BCMiscellaneous.m +++ b/CrossProcess/BCMiscellaneous.m @@ -2,7 +2,7 @@ // BCMiscellaneous.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "BCMiscellaneous.h" @@ -11,10 +11,10 @@ id BCDynamicCast(Class c, id src) { id result = nil; - + if(src != nil && [src isKindOfClass: c]) result = src; - + return result; } @@ -22,10 +22,10 @@ id BCDynamicCast(Class c, id src) id BCProtocolCast(Protocol* protocol, id src) { id result = nil; - + if(src != nil && [src conformsToProtocol: protocol]) result = src; - + return result; } @@ -33,10 +33,10 @@ id BCProtocolCast(Protocol* protocol, id src) CFTypeRef BCCFTypeCast(CFTypeID typeId, CFTypeRef src) { CFTypeRef result = NULL; - + if (src != NULL && CFGetTypeID(src) == typeId) result = src; - + return result; } diff --git a/CrossProcess/BCTimer.h b/CrossProcess/BCTimer.h index 8d92098..0f5a253 100644 --- a/CrossProcess/BCTimer.h +++ b/CrossProcess/BCTimer.h @@ -2,12 +2,12 @@ // BCTimer.h // CrossProcess // -// Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import -@interface BCTimer : NSObject +@interface BCTimer : NSObject @property(nonatomic, strong, readonly) NSDate* start; @property(nonatomic, strong, readonly) NSDate* end; diff --git a/CrossProcess/BCTimer.m b/CrossProcess/BCTimer.m index 6ea96a1..b8342fa 100644 --- a/CrossProcess/BCTimer.m +++ b/CrossProcess/BCTimer.m @@ -2,7 +2,7 @@ // BCTimer.m // CrossProcess // -// Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "BCTimer.h" @@ -35,12 +35,12 @@ - (void) stopTimer self.end = [NSDate date]; } -- (CGFloat) timeElapsedInSeconds +- (CGFloat) timeElapsedInSeconds { return [self.end timeIntervalSinceDate: self.start]; } -- (CGFloat) timeElapsedInMilliseconds +- (CGFloat) timeElapsedInMilliseconds { return [self timeElapsedInSeconds] * 1000.0f; } diff --git a/CrossProcess/BCUtilities.h b/CrossProcess/BCUtilities.h index 71d0e3f..8fd2cab 100644 --- a/CrossProcess/BCUtilities.h +++ b/CrossProcess/BCUtilities.h @@ -2,7 +2,7 @@ // BCUtilities.h // Baboon // -// Copyright Banana Camera Company 2010 - 2011. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import diff --git a/CrossProcess/BCUtilities.m b/CrossProcess/BCUtilities.m index 571d25f..80bb500 100644 --- a/CrossProcess/BCUtilities.m +++ b/CrossProcess/BCUtilities.m @@ -2,7 +2,7 @@ // BCUtilities.mm // Baboon // -// Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "BCUtilities.h" @@ -11,10 +11,10 @@ CGRect CenterRectOverRect(CGRect a, CGRect b) { CGPoint centerB; CGPoint centerA; - + centerB = CGPointMake(CGRectGetMidX(b), CGRectGetMidY(b)); centerA = CGPointMake(CGRectGetMidX(a), CGRectGetMidY(a)); - + return CGRectOffset(a, centerB.x - centerA.x, centerB.y - centerA.y); } @@ -30,15 +30,15 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) { CGFloat srcAspect = sizeToFit.width / sizeToFit.height; CGFloat dstAspect = sizeToFitInto.width / sizeToFitInto.height; - + CGSize result; - + if(fabs(srcAspect - dstAspect) < 0.01) { // Aspects are close enough result = sizeToFitInto; } - else + else { CGFloat scale = (sizeToFitInto.width / sizeToFit.width); if(sizeToFit.height * scale > sizeToFitInto.height) @@ -47,7 +47,7 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) } result = CGSizeMake(RoundEven(sizeToFit.width * scale), RoundEven(sizeToFit.height * scale)); - + while(result.width < sizeToFitInto.width || result.height < sizeToFitInto.height) { scale += 0.01; @@ -63,13 +63,13 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) { CGSize result = sizeToFit; - + if(sizeToFit.width < sizeToFit.height) { CGFloat scale = sizeToFitInto.width / sizeToFit.width; result.width = sizeToFit.width * scale; result.height = sizeToFit.height * scale; - + while(result.height < sizeToFitInto.height) { scale += 0.1; @@ -82,7 +82,7 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) CGFloat scale = sizeToFitInto.height / sizeToFit.height; result.width = sizeToFit.width * scale; result.height = sizeToFit.height * scale; - + while(result.width < sizeToFitInto.width) { scale += 0.1; @@ -90,10 +90,10 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) result.height = sizeToFit.height * scale; } } - + result.width = RoundEven(result.width); result.height = RoundEven(result.height); - + return result; } */ @@ -101,10 +101,10 @@ CGSize FitSizeWithSize(CGSize sizeToFit, CGSize sizeToFitInto) CGFloat RoundEven(CGFloat a) { long int result = lrintf(a); - + if(result % 2 ) result += 1; - + return((CGFloat)result); } @@ -129,14 +129,14 @@ CGColorRef CreateDeviceRGBColor(CGFloat r, CGFloat g, CGFloat b, CGFloat a) CGAffineTransform AdjustedTransform(UIImageOrientation orientation, CGFloat width, CGFloat height) { CGAffineTransform result = CGAffineTransformIdentity; - + if(orientation != UIImageOrientationUp && width > 0.0 && height > 0.0) { switch(orientation) { - case UIImageOrientationDown: + case UIImageOrientationDown: { - result = CGAffineTransformMake(-1, 0, 0, -1, width, height); + result = CGAffineTransformMake(-1, 0, 0, -1, width, height); break; } case UIImageOrientationLeft: @@ -144,39 +144,39 @@ CGAffineTransform AdjustedTransform(UIImageOrientation orientation, CGFloat widt result = CGAffineTransformMake(0, height/width, -width/height, 0, width, 0); break; } - case UIImageOrientationRight: + case UIImageOrientationRight: { - result = CGAffineTransformMake(0, -height/width, width/height, 0, 0, height); + result = CGAffineTransformMake(0, -height/width, width/height, 0, 0, height); 
break; } case UIImageOrientationUpMirrored: { - result = CGAffineTransformMake(-1, 0, 0, 1, width, 0); + result = CGAffineTransformMake(-1, 0, 0, 1, width, 0); break; } case UIImageOrientationDownMirrored: { - result = CGAffineTransformMake( 1, 0, 0, -1, 0, height); + result = CGAffineTransformMake( 1, 0, 0, -1, 0, height); break; } - case UIImageOrientationLeftMirrored: + case UIImageOrientationLeftMirrored: { - result = CGAffineTransformMake( 0, -height/width, -width/height, 0, width, height); + result = CGAffineTransformMake( 0, -height/width, -width/height, 0, width, height); break; } - case UIImageOrientationRightMirrored: + case UIImageOrientationRightMirrored: { - result = CGAffineTransformMake( 0, height/width, width/height, 0, 0, 0); + result = CGAffineTransformMake( 0, height/width, width/height, 0, 0, 0); break; } - default: + default: { - result = CGAffineTransformIdentity; + result = CGAffineTransformIdentity; break; } } } - + return result; } diff --git a/CrossProcess/CPAppConstants.h b/CrossProcess/CPAppConstants.h index 4ec42cc..6ab3e7b 100644 --- a/CrossProcess/CPAppConstants.h +++ b/CrossProcess/CPAppConstants.h @@ -2,7 +2,7 @@ // CPAppConstants.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // extern NSString* const CPFirstLaunchKey; diff --git a/CrossProcess/CPAppConstants.m b/CrossProcess/CPAppConstants.m index 4c5a4e4..1b10ccb 100644 --- a/CrossProcess/CPAppConstants.m +++ b/CrossProcess/CPAppConstants.m @@ -2,7 +2,7 @@ // CPAppConstants.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "CPAppConstants.h" diff --git a/CrossProcess/CPAppDelegate.h b/CrossProcess/CPAppDelegate.h index f0a1b38..6623dcc 100644 --- a/CrossProcess/CPAppDelegate.h +++ b/CrossProcess/CPAppDelegate.h @@ -2,7 +2,7 @@ // CPAppDelegate.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import diff --git a/CrossProcess/CPAppDelegate.m b/CrossProcess/CPAppDelegate.m index cd62646..6bd62c8 100644 --- a/CrossProcess/CPAppDelegate.m +++ b/CrossProcess/CPAppDelegate.m @@ -2,7 +2,7 @@ // CPAppDelegate.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "CPAppDelegate.h" @@ -32,7 +32,7 @@ @implementation CPAppDelegate - (NSString*)version { NSDictionary *infoDictionary = [[NSBundle mainBundle]infoDictionary]; - + NSString *version = infoDictionary[@"CFBundleShortVersionString"]; NSString *build = infoDictionary[(NSString*)kCFBundleVersionKey]; NSString *bundleName = infoDictionary[(NSString *)kCFBundleNameKey]; @@ -41,18 +41,18 @@ - (NSString*)version { - (BOOL) application: (UIApplication*) application didFinishLaunchingWithOptions: (NSDictionary*) launchOptions { - + [Fabric with:@[[Crashlytics class]]]; - + NSLog(@"[CPAppDelegate] didFinishLaunchingWithOptions - %@", [self version]); - + self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; self.viewController = [[CPViewController alloc] initWithNibName:@"CPViewController" bundle:nil]; self.viewController.applicationLaunching = YES; self.window.rootViewController = self.viewController; [self.window makeKeyAndVisible]; - + if([UIImagePickerController isSourceTypeAvailable: UIImagePickerControllerSourceTypeCamera] == YES) { self.locationManager = [[CLLocationManager alloc] init]; @@ -61,22 +61,22 @@ - (BOOL) application: (UIApplication*) application didFinishLaunchingWithOptions self.locationManager.distanceFilter = 5.0f; [self.locationManager startUpdatingLocation]; } - + self.workQueue = [[NSOperationQueue alloc] init]; - + /* OBSOLETE self.imageCreator = [[CPScaledImageCreator alloc] init]; self.imageCreator.queuePriority = NSOperationQueuePriorityNormal; [self.imageCreator addObserver: self forKeyPath: @"isFinished" options: 0 context: &_imageCreator]; [self.workQueue addOperation: self.imageCreator]; */ - + if(![self pManageFirstLaunchScenario]) { } - + //self.assetLibrary = [[ALAssetsLibrary alloc] init]; - + return YES; } @@ -94,7 +94,7 @@ - (void) applicationWillEnterForeground: (UIApplication*) application { self.appInBackground = NO; [self.locationManager startUpdatingLocation]; - + [self.viewController presentDefaultPhotoController]; } @@ -119,7 +119,7 @@ - (void) observeValueForKeyPath: (NSString*) keyPath ofObject: (id) object chang // cleanup self.imageCreator = nil; - } + } else { [super observeValueForKeyPath: keyPath ofObject: object change: change context: context]; @@ -132,7 +132,7 @@ - (NSURL*) youTubeHelpURL { _youTubeHelpURL = [NSURL URLWithString: @"http://www.youtube.com/watch?v=0w2jUZrkHiE"]; } - + return _youTubeHelpURL; } @@ -143,24 +143,24 @@ - (NSURL*) appSupportURL NSString* bundleID = [[NSBundle mainBundle] bundleIdentifier]; NSFileManager* fm = [NSFileManager defaultManager]; NSURL* dirURL = nil; - + // Find the application support directory in the home directory. 
NSArray* appSupportDir = [fm URLsForDirectory: NSApplicationSupportDirectory inDomains: NSUserDomainMask]; if([appSupportDir count] > 0) { // Append the bundle ID to the URL for the application support directory dirURL = [[appSupportDir objectAtIndex: 0] URLByAppendingPathComponent: bundleID]; - + NSError* error = nil; BOOL created = NO; - + created = [fm createDirectoryAtPath: [dirURL path] withIntermediateDirectories: YES attributes: nil error: &error]; assert(created && !error); } - + _appSupportURL = dirURL; } - + return _appSupportURL; } @@ -175,16 +175,16 @@ - (BOOL) pManageFirstLaunchScenario { NSUserDefaults* userDefaults = [NSUserDefaults standardUserDefaults]; BOOL isFirstLaunch = [userDefaults boolForKey: CPFirstLaunchKey] == NO; - + #ifdef DEBUG isFirstLaunch = YES; #endif - + if(isFirstLaunch) { self.viewController.shouldShowWelcomeScreen = YES; [userDefaults setBool: YES forKey: CPFirstLaunchKey]; - + // Setup standard defaults [userDefaults setBool: NO forKey: CPFullSizeImageOptionKey]; [userDefaults setBool: NO forKey: CPKeepOriginalOptionKey]; @@ -195,7 +195,7 @@ - (BOOL) pManageFirstLaunchScenario [userDefaults setBool: YES forKey: CPBasicProcessingOptionKey]; [userDefaults setBool: NO forKey: CPExtremeProcessingOptionKey]; } - + return isFirstLaunch; } diff --git a/CrossProcess/CPImageProcessor.h b/CrossProcess/CPImageProcessor.h index 2913990..38f7631 100644 --- a/CrossProcess/CPImageProcessor.h +++ b/CrossProcess/CPImageProcessor.h @@ -2,7 +2,7 @@ // CPImageProcessor.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -26,7 +26,7 @@ - (id) initWithImage: (UIImage*) image metadata: (NSDictionary*) imageMetadata assetLibraryURL: (NSURL*) assetURL - scale: (CGFloat) scale + scale: (CGFloat) scale cropRect: (CGRect) cropRect wasCaptured: (BOOL) wasCaptured; diff --git a/CrossProcess/CPImageProcessor.m b/CrossProcess/CPImageProcessor.m index c26c048..6956e4d 100644 --- a/CrossProcess/CPImageProcessor.m +++ b/CrossProcess/CPImageProcessor.m @@ -2,7 +2,7 @@ // CPImageProcessor.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "CPImageProcessor.h" @@ -60,7 +60,7 @@ @implementation CPImageProcessor - (id) initWithImage: (UIImage*) image metadata: (NSDictionary*) imageMetadata assetLibraryURL: (NSURL*) assetURL - scale: (CGFloat) scale + scale: (CGFloat) scale cropRect: (CGRect) cropRect wasCaptured: (BOOL) wasCaptured { @@ -72,17 +72,17 @@ - (id) initWithImage: (UIImage*) image self.scale = scale; self.cropRect = cropRect; self.wasCaptured = wasCaptured; - + CGSize imageSize = image.size; self.portraitOrientation = imageSize.height >= imageSize.width; self.useBorder = [[NSUserDefaults standardUserDefaults] boolForKey: CPWantsBorderOptionKey]; - self.imageAssets = [NSDictionary dictionaryWithContentsOfFile: [[NSBundle mainBundle] pathForResource: @"image_assets" + self.imageAssets = [NSDictionary dictionaryWithContentsOfFile: [[NSBundle mainBundle] pathForResource: @"image_assets" ofType: @"plist"]]; CPAppDelegate* appDelegate = BCCastAsClass(CPAppDelegate, [[UIApplication sharedApplication] delegate]); _appSupportURL = [appDelegate appSupportURL]; } - + return self; } @@ -91,10 +91,10 @@ - (void) main NSLog(@"[CPIP] main"); BCTimer* timer = [[BCTimer alloc] init]; [timer startTimer]; - + // This method is called by a thread that's set up for us by the NSOperationQueue. assert(![NSThread isMainThread]); - + if(!self.imageToProcess) { NSLog(@"No image to process!"); @@ -102,22 +102,22 @@ - (void) main else { UIBackgroundTaskIdentifier backgroundIdent = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}]; - + CGFloat scale = self.scale; CGSize imageSize = [self pFindAppropriateImageSize]; NSDictionary* imageAssets = [self pImageAssetsForImageSize: imageSize scale: scale]; - + NSLog(@" - imageSize = %@", NSStringFromCGSize(imageSize)); NSLog(@" - Assets = %@", imageAssets); - + if(!imageAssets) { NSAssert(false, @"MISSING IMAGE ASSETS %@", NSStringFromCGSize(imageSize)); } - + else { // We don't scale FFC images or iPod Touch image sizes - + if(self.portraitOrientation) { if(CGSizeEqualToSize(imageSize, CPFFCNativeImageSize) || @@ -137,13 +137,13 @@ - (void) main scale = 1.0; } } - + // NSLog(@"Scale is: %f", scale); - - BCImage* processedImage = [[BCImage alloc] initWithSize: self.imageToProcess.size + + BCImage* processedImage = [[BCImage alloc] initWithSize: self.imageToProcess.size scale: scale orientation: self.imageToProcess.imageOrientation]; - + // NSLog(@"Processed Image %@", processedImage); if(processedImage) { @@ -155,45 +155,45 @@ - (void) main 5. Apply the screen image 6. 
If the settings requested a border, apply the final border */ - + // NSLog(@" - doing the stuff"); - + CGSize sourceImageSize = self.imageToProcess.size; - + [processedImage pushContext]; - + CGAffineTransform scaleXForm = CGAffineTransformMakeScale(scale, scale); CGContextSetInterpolationQuality(processedImage.context, kCGInterpolationHigh); CGContextConcatCTM(processedImage.context, scaleXForm); - CGContextConcatCTM(processedImage.context, AdjustedTransform(self.imageToProcess.imageOrientation, - sourceImageSize.width, + CGContextConcatCTM(processedImage.context, AdjustedTransform(self.imageToProcess.imageOrientation, + sourceImageSize.width, sourceImageSize.height)); - + // NSLog(@" - drawing"); - + /* CGRect imageRect = CGRectZero; imageRect.size = FitSizeWithSize(sourceImageSize, imageSize); - imageRect = Copyright Banana Camera Company 2010 - 2012OverRect(imageRect, CGRectMake(0, 0, imageSize.width, imageSize.height)); + imageRect = Copyright 2019 Zinc Collective LLCOverRect(imageRect, CGRectMake(0, 0, imageSize.width, imageSize.height)); */ - - CGContextDrawImage(processedImage.context, + + CGContextDrawImage(processedImage.context, CGRectMake(0.0, 0.0, sourceImageSize.width, sourceImageSize.height), self.imageToProcess.CGImage); - + //CGContextSetBlendMode(resultImage.context, kCGBlendModeOverlay); //CGContextDrawImage(resultImage.context, CGRectMake(0.0, 0.0, sourceSize.width, sourceSize.height), image.CGImage); - + [processedImage popContext]; - + // NSLog(@" - curves?"); - + NSString* curvesName = [self.curvesPath lastPathComponent]; - + CGSize processedImageSize = processedImage.size; CGRect processedImageRect = CGRectMake(0.0, 0.0, processedImageSize.width, processedImageSize.height); - - + + if([curvesName isEqualToString: @"negative.acv"] == NO) { // NSLog(@" - drawing vignette"); @@ -203,19 +203,19 @@ - (void) main inImage: processedImage finalImageSize: processedImage.size]; } - + if(self.curvesPath) { // NSLog(@" - applying curves"); NSArray* imageCurves = [BCImageCurve imageCurvesFromACV: self.curvesPath]; [processedImage applyCurves: imageCurves]; } - + CGContextSaveGState(processedImage.context); - + CGColorRef greyColor = CreateDeviceGrayColor(0.8f, 1.0f); CGFloat alpha = 1.0; - + if([curvesName isEqualToString: @"basic.acv"]) { alpha = 0.10f; @@ -240,46 +240,46 @@ - (void) main { alpha = 0.20f; } - - + + // NSLog(@" - applying alpha and blend"); CGContextSetFillColorWithColor(processedImage.context, greyColor); CGContextSetBlendMode(processedImage.context, kCGBlendModeColor); CGContextSetAlpha(processedImage.context, alpha); CGContextFillRect(processedImage.context, processedImageRect); - + CGContextRestoreGState(processedImage.context); CGColorRelease(greyColor); - - + + // NSLog(@" - drawing screen"); - + [self pDrawImageAtPath: [imageAssets objectForKey: @"screen"] blendMode: kCGBlendModeScreen alpha: 1.0 inImage: processedImage finalImageSize: processedImage.size]; - + if(self.useBorder) { // NSLog(@" - drawing border"); [self pDrawImageAtPath: [imageAssets objectForKey: @"border"] blendMode: kCGBlendModeNormal alpha: 1.0 - inImage: processedImage + inImage: processedImage finalImageSize: processedImage.size]; } - - + + // NSLog(@" - done!"); self.processedImage = processedImage; } - - + + // NSLog(@" - stopping timer"); [timer stopTimer]; [timer logElapsedInMilliseconds: @"Time to process image"]; - + [[UIApplication sharedApplication] endBackgroundTask: backgroundIdent]; } } @@ -316,7 +316,7 @@ - (CGSize) pFindAppropriateImageSize else { computedSize = 
imageSizes[0]; - + for(NSInteger i = 0; i < 5; ++i) { if((imageSize.width <= imageSizes[i].width) && (imageSize.height <= imageSizes[i].height)) @@ -342,7 +342,7 @@ - (CGSize) pFindAppropriateImageSize { computedSize.width = imageSizes[0].height; computedSize.height = imageSizes[0].width; - + for(NSInteger i = 0; i < 5; ++i) { if((imageSize.width <= imageSizes[i].height) && (imageSize.height <= imageSizes[i].width)) @@ -355,7 +355,7 @@ - (CGSize) pFindAppropriateImageSize } } } - + return computedSize; } @@ -380,23 +380,23 @@ - (void) pDrawImageAtPath: (NSString*) path if(imageRef) { CGContextSaveGState(image.context); - + CGContextSetAlpha(image.context, alpha); CGContextSetBlendMode(image.context, blendMode); - + CGRect imageRect = CGRectMake(0, 0, CGImageGetWidth(imageRef), CGImageGetHeight(imageRef)); - + // NSLog(@" - finished first context stuff"); - + // Rotate the portrait asset to landscape if necessary as we only store assets in portrait orientation - + if(!self.portraitOrientation) { // NSLog(@" - landscape"); CGFloat hScale = finalSize.width / imageRect.size.height; CGFloat vScale = finalSize.height / imageRect.size.width; CGContextScaleCTM(image.context, hScale, vScale); - + CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, imageRect.size.width); transform = CGAffineTransformRotate(transform, radians(-90)); CGContextConcatCTM(image.context, transform); @@ -406,20 +406,20 @@ - (void) pDrawImageAtPath: (NSString*) path // NSLog(@" - portrait"); CGFloat hScale = finalSize.width / imageRect.size.width; CGFloat vScale = finalSize.height / imageRect.size.height; - + CGContextScaleCTM(image.context, hScale, vScale); } - + // TODO: Apply scale if image isn't the correct size. - + // NSLog(@" - draw"); CGContextDrawImage(image.context, imageRect, imageRef); - + CGImageRelease(imageRef); CGContextRestoreGState(image.context); // NSLog(@" - done"); } - + CGDataProviderRelease(dataProvider); } } @@ -463,7 +463,7 @@ - (NSDictionary*) pImageAssetsForImageSize: (CGSize) imageSize scale: (CGFloat) // replace with larger if desired result = [self.imageAssets objectForKey: @"2448x3264"]; } - + if(scale == 0.5) { result = [result objectForKey: @"50%"]; @@ -472,7 +472,7 @@ - (NSDictionary*) pImageAssetsForImageSize: (CGSize) imageSize scale: (CGFloat) { result = [result objectForKey: @"100%"]; } - + return result; } @@ -482,15 +482,15 @@ - (NSData*) pLoadAsset: (NSURL*) assetURL NSString* asset = [[[assetURL path] lastPathComponent] stringByDeletingPathExtension]; NSBundle* mainBundle = [NSBundle mainBundle]; NSError* error = nil; - - data = [NSData dataWithContentsOfFile: [mainBundle pathForResource: asset ofType: @"png"] - options: NSDataReadingMappedIfSafe + + data = [NSData dataWithContentsOfFile: [mainBundle pathForResource: asset ofType: @"png"] + options: NSDataReadingMappedIfSafe error: &error]; if(!data || error) { NSLog(@"Failed to load asset: %@. 
Error: %@", asset, error); } - + return data; } @@ -498,18 +498,18 @@ - (NSData*) pLoadAsset: (NSURL*) assetURL - (NSData*) pLoadAsset: (NSURL*) assetURL { NSData* data = nil; - + if(assetURL) { NSError* error = nil; NSData* data = [NSData dataWithContentsOfURL: assetURL options: NSDataReadingMappedIfSafe error: &error]; - + if(!data || error) { NSString* path = [assetURL path]; NSBundle* mainBundle = [NSBundle mainBundle]; NSString* imageName = nil; - + if([path rangeOfString: @"border"].length > 0) { imageName = @"border"; @@ -522,11 +522,11 @@ - (NSData*) pLoadAsset: (NSURL*) assetURL { imageName = @"screen"; } - + if(imageName) { - data = [NSData dataWithContentsOfFile: [mainBundle pathForResource: imageName ofType: @"png"] - options: NSDataReadingMappedIfSafe + data = [NSData dataWithContentsOfFile: [mainBundle pathForResource: imageName ofType: @"png"] + options: NSDataReadingMappedIfSafe error: &error]; } } @@ -535,7 +535,7 @@ - (NSData*) pLoadAsset: (NSURL*) assetURL { NSLog(@"Unable to load asset at url: %@", assetURL); } - + return data; } */ @@ -543,7 +543,7 @@ - (NSData*) pLoadAsset: (NSURL*) assetURL - (CGSize) pAssetSizeForName: (NSString*) assetName assetClass: (NSString*) assetClass { CGSize result = CGSizeZero; - + for(NSDictionary* level1Items in [self.imageAssets allValues]) { for(NSDictionary* level2Items in [level1Items allValues]) @@ -554,13 +554,13 @@ - (CGSize) pAssetSizeForName: (NSString*) assetName assetClass: (NSString*) asse break; } } - + if(!CGSizeEqualToSize(result, CGSizeZero)) { break; } - } - + } + return result; } @@ -571,20 +571,20 @@ - (NSData*) pLoadAndCacheAsset: (NSURL*) assetURL static BOOL sLocked = NO; NSData* data = nil; - + if(assetURL) { NSError* error = nil; data = [NSData dataWithContentsOfURL: assetURL options: NSDataReadingMappedIfSafe error: &error]; // data doesn't exist so lets generate and cache the asset. - + if((!data || error) && !sLocked) { NSString* path = [assetURL path]; NSString* assetClass = nil; UIImage* image = nil; - + if([path rangeOfString: @"border"].length > 0) { image = [UIImage imageNamed: @"border"]; @@ -600,20 +600,20 @@ - (NSData*) pLoadAndCacheAsset: (NSURL*) assetURL image = [UIImage imageNamed: @"screen"]; assetClass = @"screen"; } - + if(image) { NSString* assetName = [[assetURL path] lastPathComponent]; CGSize assetSize = [self pAssetSizeForName: (NSString*) assetName assetClass: assetClass]; [image createScaledImage: assetSize atURL: assetURL]; - + sLocked = YES; data = [self pLoadAndCacheAsset: assetURL]; sLocked = NO; } } } - + return data; } diff --git a/CrossProcess/CPOptionsViewController.h b/CrossProcess/CPOptionsViewController.h index ab1662c..f741092 100644 --- a/CrossProcess/CPOptionsViewController.h +++ b/CrossProcess/CPOptionsViewController.h @@ -2,14 +2,14 @@ // CPOptionsViewController.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import @protocol CPOptionsViewControllerDelegate; -@interface CPOptionsViewController : UIViewController< UITableViewDelegate, +@interface CPOptionsViewController : UIViewController< UITableViewDelegate, UITableViewDataSource, UINavigationBarDelegate, UIWebViewDelegate> diff --git a/CrossProcess/CPOptionsViewController.m b/CrossProcess/CPOptionsViewController.m index 56019a2..e23dd6c 100644 --- a/CrossProcess/CPOptionsViewController.m +++ b/CrossProcess/CPOptionsViewController.m @@ -2,7 +2,7 @@ // CPOptionsViewController.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "CPOptionsViewController.h" @@ -74,7 +74,7 @@ - (void) viewDidUnload self.saveOriginalCell = nil; self.infoCell = nil; self.communityCell = nil; - + [super viewDidUnload]; } @@ -94,12 +94,12 @@ - (IBAction) done: (id) sender - (CGFloat) tableView: (UITableView*) tableView heightForHeaderInSection: (NSInteger) section { CGFloat height = 26.0; - + if(section == 0) { height = 30.0; } - + return height; } @@ -107,7 +107,7 @@ - (UIView*) tableView: (UITableView*) tableView viewForHeaderInSection: (NSInteg { UIView* customView = [[UIView alloc] initWithFrame: CGRectMake(0, 0.0, 300.0, 26.0)]; UILabel* headerLabel = [[UILabel alloc] initWithFrame: CGRectZero]; - + headerLabel.backgroundColor = [UIColor clearColor]; headerLabel.opaque = NO; headerLabel.textColor = [UIColor darkGrayColor]; @@ -115,7 +115,7 @@ - (UIView*) tableView: (UITableView*) tableView viewForHeaderInSection: (NSInteg headerLabel.shadowOffset = CGSizeMake(0, 1); headerLabel.font = [UIFont boldSystemFontOfSize: 16]; headerLabel.textAlignment = NSTextAlignmentLeft; - + if(section == 0) { NSString* processHeaderText = NSLocalizedString(@"processHeaderText", @"Options - Process Header label"); @@ -128,7 +128,7 @@ - (UIView*) tableView: (UITableView*) tableView viewForHeaderInSection: (NSInteg headerLabel.text = extrasHeaderText; headerLabel.frame = CGRectMake(20, -4, 300, 26.0); } - + [customView addSubview:headerLabel]; return customView; } @@ -154,7 +154,7 @@ - (NSInteger) numberOfSectionsInTableView: (UITableView*) tableView - (NSInteger) tableView: (UITableView*) table numberOfRowsInSection: (NSInteger) section { NSInteger numRows = 0; - + if(section == 0) { numRows = 5; @@ -163,11 +163,11 @@ - (NSInteger) tableView: (UITableView*) table numberOfRowsInSection: (NSInteger) { numRows = 5; } - + return numRows; } -- (void) tableView: (UITableView*) tableView didSelectRowAtIndexPath: (NSIndexPath*) indexPath +- (void) tableView: (UITableView*) tableView didSelectRowAtIndexPath: (NSIndexPath*) indexPath { if(indexPath.section == 1 && indexPath.row == 3) { @@ -184,7 +184,7 @@ - (void) tableView: (UITableView*) tableView didSelectRowAtIndexPath: (NSIndexPa - (UITableViewCell*) tableView: (UITableView*) tableView cellForRowAtIndexPath: (NSIndexPath*) indexPath { UITableViewCell* cell = nil; - + switch(indexPath.section) { case 0: @@ -233,7 +233,7 @@ - (UITableViewCell*) tableView: (UITableView*) tableView cellForRowAtIndexPath: { cell = self.communityCell; } - + break; } default: @@ -241,7 +241,7 @@ - (UITableViewCell*) tableView: (UITableView*) tableView cellForRowAtIndexPath: break; } } - + return cell; } @@ -254,31 +254,31 @@ - (IBAction) moreInfo: (id) sender self.moreInfoWebView = [[UIWebView alloc] initWithFrame: CGRectZero]; self.moreInfoWebView.scalesPageToFit = YES; self.moreInfoWebView.delegate = self; - + // Webview frame is the full size of the 
screen - the height of the navigation bar. - + CGRect webViewFrame = self.tableView.frame; webViewFrame.origin.x += webViewFrame.size.width; self.moreInfoWebView.frame = webViewFrame; self.moreInfoWebView.backgroundColor = [UIColor clearColor]; - } - + } + // Insert the webview as a sibling of the options view (below it - to ensure that it's also below the navigation bar) - + [self.view.superview insertSubview: self.moreInfoWebView aboveSubview: self.view]; - + // Calculate the final (animatable) frames - + CGRect newWebViewFrame = self.moreInfoWebView.frame; newWebViewFrame.origin.x -= CGRectGetWidth(newWebViewFrame); - + [UIView animateWithDuration: 0.4 animations:^ { self.moreInfoWebView.frame = newWebViewFrame; }]; - + [self.moreInfoWebView loadRequest: [NSURLRequest requestWithURL: [NSURL URLWithString: CPBananaCameraMoreAppsURL]]]; - + NSString* moreAppsTitle = NSLocalizedString(@"moreAppsTitle", @"More Apps options title"); UINavigationItem* navItem = [[UINavigationItem alloc] initWithTitle: moreAppsTitle]; [self.navigationBar pushNavigationItem: navItem animated: YES]; @@ -288,10 +288,10 @@ - (IBAction) showManual: (id) sender { id appDelegate = BCCastAsProtocol(BCAppDelegate, [[UIApplication sharedApplication] delegate]); NSURL* manualURL = [appDelegate youTubeHelpURL]; - + if(manualURL) { - [[UIApplication sharedApplication] openURL: manualURL]; + [[UIApplication sharedApplication] openURL: manualURL]; } } @@ -302,33 +302,33 @@ - (IBAction) community: (id) sender self.socialWebView = [[UIWebView alloc] initWithFrame: CGRectZero]; self.socialWebView.scalesPageToFit = YES; self.socialWebView.delegate = self; - + // Webview frame is the full size of the screen - the height of the navigation bar. - + CGRect webViewFrame = self.tableView.frame; webViewFrame.origin.x += webViewFrame.size.width; self.socialWebView.frame = webViewFrame; self.socialWebView.backgroundColor = [UIColor clearColor]; - } - + } + // Insert the webview as a sibling of the options view (below it - to ensure that it's also below the navigation bar) - + [self.view.superview insertSubview: self.socialWebView aboveSubview: self.view]; - + // Calculate the final (animatable) frames - + CGRect newWebViewFrame = self.socialWebView.frame; newWebViewFrame.origin.x -= CGRectGetWidth(newWebViewFrame); - + // Animate them into place. 
- + [UIView animateWithDuration: 0.4 animations: ^ { self.socialWebView.frame = newWebViewFrame; }]; - + [_socialWebView loadRequest: [NSURLRequest requestWithURL: [NSURL URLWithString: CPBananaCameraSocialURL]]]; - + NSString* communityTitle = NSLocalizedString(@"communityTitle", @"Community options title"); UINavigationItem* navItem = [[UINavigationItem alloc] initWithTitle: communityTitle]; [self.navigationBar pushNavigationItem: navItem animated: YES]; @@ -337,7 +337,7 @@ - (IBAction) community: (id) sender - (IBAction) keepOriginal: (id) sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPKeepOriginalOptionKey]; @@ -347,7 +347,7 @@ - (IBAction) keepOriginal: (id) sender - (IBAction) useBorder: (id) sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPWantsBorderOptionKey]; @@ -357,7 +357,7 @@ - (IBAction) useBorder: (id) sender - (IBAction) useRedCurve:(id)sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPRedProcessingOptionKey]; @@ -367,7 +367,7 @@ - (IBAction) useRedCurve:(id)sender - (IBAction) useBlueCurve:(id)sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPBlueProcessingOptionKey]; @@ -377,7 +377,7 @@ - (IBAction) useBlueCurve:(id)sender - (IBAction) useGreenCurve:(id)sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPGreenProcessingOptionKey]; @@ -387,7 +387,7 @@ - (IBAction) useGreenCurve:(id)sender - (IBAction) useBasicCurve:(id)sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPBasicProcessingOptionKey]; @@ -397,7 +397,7 @@ - (IBAction) useBasicCurve:(id)sender - (IBAction) useExtremeCurve:(id)sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPExtremeProcessingOptionKey]; @@ -407,7 +407,7 @@ - (IBAction) useExtremeCurve:(id)sender - (IBAction) useFullSizeImage: (id) sender { UISwitch* uiSwitch = BCCastAsClass(UISwitch, sender); - + if(uiSwitch) { [[NSUserDefaults standardUserDefaults] setBool: uiSwitch.isOn forKey: CPFullSizeImageOptionKey]; @@ -417,7 +417,7 @@ - (IBAction) useFullSizeImage: (id) sender - (void) pUpdateControls { NSUserDefaults* defaults = [NSUserDefaults standardUserDefaults]; - + BCCastAsClass(UISwitch, [self.fullsizeImageCell viewWithTag: CPSwitchViewTag]).on = [defaults boolForKey: CPFullSizeImageOptionKey]; BCCastAsClass(UISwitch, [self.saveOriginalCell viewWithTag: CPSwitchViewTag]).on = [defaults boolForKey: CPKeepOriginalOptionKey]; BCCastAsClass(UISwitch, [self.borderCell viewWithTag: CPSwitchViewTag]).on = [defaults boolForKey: CPWantsBorderOptionKey]; @@ -434,15 +434,15 @@ - (void) pAdjustLabelStrings BCCastAsClass(UILabel, [self.saveOriginalCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"saveOriginalLabel", @"Save Original Label"); BCCastAsClass(UILabel, [self.borderCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"borderLabel", "Border Label"); BCCastAsClass(UILabel, [self.fullsizeImageCell viewWithTag: 
CPLabelViewTag]).text = NSLocalizedString(@"fullSizeImagesLabel", "Full Size Images Label"); - + BCCastAsClass(UILabel, [self.communityCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"communityLabel", "Community Label"); BCCastAsClass(UILabel, [self.infoCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"moreAppsLabel", "More Apps Label"); - + BCCastAsClass(UILabel, [self.redCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"redLabel", "Red Label"); BCCastAsClass(UILabel, [self.greenCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"greenLabel", "Green Label"); BCCastAsClass(UILabel, [self.blueCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"blueLabel", "Blue Label"); BCCastAsClass(UILabel, [self.basicCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"basicLabel", "Basic Label"); - BCCastAsClass(UILabel, [self.extremeCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"extremeLabel", "Extreme Label"); + BCCastAsClass(UILabel, [self.extremeCell viewWithTag: CPLabelViewTag]).text = NSLocalizedString(@"extremeLabel", "Extreme Label"); } #pragma mark - @@ -452,16 +452,16 @@ - (BOOL) navigationBar: (UINavigationBar*) navigationBar shouldPopItem: (UINavig { NSString* communityTitle = NSLocalizedString(@"communityTitle", @"Community options title"); NSString* moreAppsTitle = NSLocalizedString(@"moreAppsTitle", @"More Apps options title"); - + if([item.title isEqualToString: moreAppsTitle]) { // Calculate the final (animatable) frames - + CGRect newWebViewFrame = self.moreInfoWebView.frame; newWebViewFrame.origin.x += CGRectGetWidth(newWebViewFrame); - + // Animate them into place. - + [UIView animateWithDuration: 0.4 animations:^ { self.moreInfoWebView.frame = newWebViewFrame; @@ -474,12 +474,12 @@ - (BOOL) navigationBar: (UINavigationBar*) navigationBar shouldPopItem: (UINavig else if([item.title isEqualToString: communityTitle]) { // Calculate the final (animatable) frames - + CGRect newWebViewFrame = self.socialWebView.frame; newWebViewFrame.origin.x += CGRectGetWidth(newWebViewFrame); - + // Animate them into place. - + [UIView animateWithDuration: 0.4 animations:^ { self.socialWebView.frame = newWebViewFrame; @@ -489,7 +489,7 @@ - (BOOL) navigationBar: (UINavigationBar*) navigationBar shouldPopItem: (UINavig [self.socialWebView removeFromSuperview]; }]; } - + return YES; } diff --git a/CrossProcess/CPViewController.h b/CrossProcess/CPViewController.h index 0543a62..d17243a 100644 --- a/CrossProcess/CPViewController.h +++ b/CrossProcess/CPViewController.h @@ -2,7 +2,7 @@ // CPViewController.h // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -23,7 +23,7 @@ extern const NSInteger CPWelcomeViewTag; extern const NSInteger CPToolbarTag; -@interface CPViewController : UIViewController< BCImageCaptureControllerDelegate, +@interface CPViewController : UIViewController< BCImageCaptureControllerDelegate, CPOptionsViewControllerDelegate, UIActionSheetDelegate, MFMailComposeViewControllerDelegate, diff --git a/CrossProcess/CPViewController.m b/CrossProcess/CPViewController.m index 848d09c..2175cdf 100644 --- a/CrossProcess/CPViewController.m +++ b/CrossProcess/CPViewController.m @@ -2,7 +2,7 @@ // CPViewController.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "CPViewController.h" @@ -60,7 +60,7 @@ - (void) pBeginProcessingPhoto: (CGSize) imageSize; - (void) pImageProcessorDone: (CPImageProcessor*) imageProcessor; - (void) pWriteImageToPhotoLibrary: (BCImage*) image metadata: (NSDictionary*) metadata gpsData: (NSDictionary*) gpsData; - (void) pGatherOriginalLocation: (NSURL*) assetURL andWriteToPhotoLibrary: (BCImage*) image; -- (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage +- (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage metadata: (NSDictionary*) metadata gpsData: (NSDictionary*) gpsData writingOriginal: (BOOL) writingOriginal @@ -123,18 +123,18 @@ @implementation CPViewController - (void) didReceiveMemoryWarning { -#if DEBUG +#if DEBUG NSLog(@"Recieved memory warning"); #endif - + if(self.captureSound != 0) { AudioServicesDisposeSystemSoundID(self.captureSound); self.captureSound = 0; } - + self.recycledImageViews = nil; - + [super didReceiveMemoryWarning]; } @@ -146,7 +146,7 @@ - (id) initWithNibName: (NSString*) nibNameOrNil bundle: (NSBundle*) nibBundleOr { self.processedImages = [[NSMutableArray alloc] initWithCapacity: 10]; } - + return self; } @@ -165,7 +165,7 @@ - (void) viewDidLayoutSubviews #if DEBUG NSLog(@"-viewDidLayoutSubviews"); #endif - + [super viewDidLayoutSubviews]; } */ @@ -173,11 +173,11 @@ - (void) viewDidLayoutSubviews - (void) viewDidLoad { [super viewDidLoad]; - + self.debugVersionLabel.text = [(CPAppDelegate*)[[UIApplication sharedApplication] delegate] version]; - + self.view.tag = CPRootViewTag; - + //self.scrollView.frame = [self pFrameForScrollView]; [self pAdjustScrollViewFrame]; self.scrollView.tag = CPScrollViewTag; @@ -190,22 +190,22 @@ - (void) viewDidLoad self.scrollView.directionalLockEnabled = YES; self.scrollView.delegate = self; - UITapGestureRecognizer* singleTap = [[UITapGestureRecognizer alloc] initWithTarget: self + UITapGestureRecognizer* singleTap = [[UITapGestureRecognizer alloc] initWithTarget: self action: @selector(pHandleSingleTap:)]; singleTap.numberOfTapsRequired = 1; [self.scrollView addGestureRecognizer: singleTap]; - + [self setBackgroundImage]; [self pSetupPhotoCaptureSound]; - + if(self.imageCaptureController == nil) { self.imageCaptureController = [[BCImageCaptureController alloc] initWithNibName: @"BCImageCaptureController" bundle: nil]; self.imageCaptureController.delegate = self; } - + // Set the appropriate toolbar - + if([UIImagePickerController isSourceTypeAvailable: UIImagePickerControllerSourceTypeCamera] == YES) { self.toolbar = self.toolbarWithCamera; @@ -218,28 +218,28 @@ - (void) viewDidLoad self.toolbarWithCamera = nil; self.toolbarNoCamera = nil; } - + // Toolbar should be off by default self.toolbar.alpha = 0.0f; - + CGSize viewSize = self.view.frame.size; CGSize toolbarSize = self.toolbar.frame.size; - + self.toolbar.frame = CGRectMake(0, viewSize.height - toolbarSize.height, viewSize.width, toolbarSize.height); self.toolbar.tag = CPToolbarTag; [self.view insertSubview: self.toolbar aboveSubview: self.scrollView]; - + self.recycledImageViews = [[NSMutableSet alloc] init]; self.visibleImageViews = [[NSMutableSet alloc] init]; - + } - (void) viewDidUnload { -#if DEBUG +#if DEBUG NSLog(@"View did unload"); #endif - + [super viewDidUnload]; if(self.captureSound != 0) @@ -256,11 +256,11 @@ - (void) viewDidUnload - (void) viewDidAppear: (BOOL) animated { [super viewDidAppear: animated]; - + if(self.applicationLaunching) { self.applicationLaunching = NO; - + if(self.shouldShowWelcomeScreen) { [self showFirstLaunchScreen]; @@ -277,7 
+277,7 @@ - (void) viewDidAppear: (BOOL) animated - (void) showFirstLaunchScreen { UIView* rootView = [self.view superview]; - + UINib* welcomeNib = nil; welcomeNib = [UINib nibWithNibName: @"CPWelcome" bundle: nil]; @@ -293,24 +293,24 @@ - (void) showFirstLaunchScreen [rootView insertSubview: welcomeView aboveSubview: self.view]; welcomeView.tag = CPWelcomeViewTag; - + UILabel* welcomeMessageTitleLabel = BCCastAsClass(UILabel, [welcomeView viewWithTag: CPWelcomeLabelTitleTag]); UILabel* welcomeMessageMessageLabel = BCCastAsClass(UILabel, [welcomeView viewWithTag: CPWelcomeLabelMessageTag]); UIButton* introVideoButton = BCCastAsClass(UIButton, [welcomeView viewWithTag: CPIntroVideoButtonTag]); - + NSString* welcomeTitleString = NSLocalizedString(@"welcomeMessage", @"CP Welcome Title"); NSString* welcomeMessageString = NSLocalizedString(@"welcomeMessageTitle", @"CP Welcome Message"); NSString* introVideoButtonString = NSLocalizedString(@"introVideoText", @"CP Intro Video Button Text"); - + welcomeMessageTitleLabel.text = welcomeTitleString; welcomeMessageMessageLabel.text = welcomeMessageString; - + [introVideoButton setTitle: introVideoButtonString forState: UIControlStateNormal]; [introVideoButton setTitle: introVideoButtonString forState: UIControlStateHighlighted]; - - [UIView transitionFromView: self.view - toView: welcomeView - duration: 1.0 + + [UIView transitionFromView: self.view + toView: welcomeView + duration: 1.0 options: UIViewAnimationOptionShowHideTransitionViews | UIViewAnimationOptionTransitionCrossDissolve completion: ^(BOOL finished) { @@ -373,18 +373,18 @@ - (void) userPickedPhoto: (UIImage*) photo withAssetLibraryURL: (NSURL*) url [self pValidateToolbarItems]; // Begin an image processing operation - + [ImageMetadata fetchMetadataForURL:url found:^(NSDictionary * meta) { NSLog(@"GOT METADATA %@", meta); - + self.photoToProcess = photo; self.photoMetadata = meta; self.photoAssetLibraryURL = url; self.photoWasCaptured = NO; - + // If the scrollview's contentOffset is already 0,0 then our delegate method for scrollViewDidEndScrollingAnimation // won't be called. 
- + CGPoint scrollOffset = self.scrollView.contentOffset; if(CGPointEqualToPoint(CGPointZero, scrollOffset)) { @@ -406,7 +406,7 @@ - (void) userCapturedPhoto: (UIImage*) photo withMetadata: (NSDictionary*) metad [self pValidateToolbarItems]; // Begin an image processing operation - + self.photoToProcess = photo; self.photoMetadata = metadata; self.photoAssetLibraryURL = nil; @@ -433,7 +433,7 @@ - (void) userCancelled { [self pShowToolbar: NO]; } - + [self dismissViewControllerAnimated: YES completion:^{}]; [self pValidateToolbarItems]; } @@ -444,12 +444,12 @@ - (void) observeValueForKeyPath: (NSString*) keyPath ofObject: (id) object chang { CPImageProcessor* ip = BCCastAsClass(CPImageProcessor, object); // NSLog(@"[CPV] observeValueForKeyPath %i", (ip && [ip isFinished])); - + if(ip && [ip isFinished]) { [self performSelectorOnMainThread: @selector(pImageProcessorDone:) withObject: ip waitUntilDone: NO]; } - } + } else { [super observeValueForKeyPath: keyPath ofObject: object change: change context: context]; @@ -469,10 +469,10 @@ - (IBAction)handleAction:(id)sender { } - (void) shareImage:(UIImage*)image { - + NSString * shareText = @"Made with #CrossProcess"; NSURL * shareURL = [NSURL URLWithString:@"https://itunes.apple.com/us/app/cross-process/id355754066?mt=8"]; - + UIActivityViewController * share = [[UIActivityViewController alloc] initWithActivityItems:@[image, shareText, shareURL] applicationActivities:NULL]; [self presentViewController:share animated:YES completion:NULL]; } @@ -499,10 +499,10 @@ - (IBAction) showManual: (id) sender { id appDelegate = BCCastAsProtocol(BCAppDelegate, [[UIApplication sharedApplication] delegate]); NSURL* manualURL = [appDelegate youTubeHelpURL]; - + if(manualURL) { - [[UIApplication sharedApplication] openURL: manualURL]; + [[UIApplication sharedApplication] openURL: manualURL]; } } @@ -534,31 +534,31 @@ - (void) willRotateToInterfaceOrientation: (UIInterfaceOrientation) toInterfaceO { CGFloat offset = self.scrollView.contentOffset.x; CGFloat scrollViewWidth = self.scrollView.bounds.size.width; - + if(offset >= 0) { self.firstVisibleImageIndexBeforeRotation = floorf(offset / scrollViewWidth); self.percentScrolledIntoFirstVisibleImage = (offset - (self.firstVisibleImageIndexBeforeRotation * scrollViewWidth)) / scrollViewWidth; - } + } else { self.firstVisibleImageIndexBeforeRotation = 0; self.percentScrolledIntoFirstVisibleImage = offset / scrollViewWidth; - } + } } - (void) willAnimateRotationToInterfaceOrientation: (UIInterfaceOrientation) toInterfaceOrientation duration: (NSTimeInterval) duration { // recalculate contentSize based on current orientation self.scrollView.contentSize = [self pContentSizeForScrollView]; - + // adjust frames and configuration of each visible page - + for(UIView* subview in self.scrollView.subviews) { subview.frame = [self pFrameForViewAtIndex: subview.index]; } - + // adjust contentOffset to preserve page location based on values collected prior to location CGFloat scrollViewWidth = self.scrollView.bounds.size.width; CGFloat newOffset = (self.firstVisibleImageIndexBeforeRotation * scrollViewWidth) + (self.percentScrolledIntoFirstVisibleImage * scrollViewWidth); @@ -586,51 +586,51 @@ - (void) pCreateAndAnimatePlaceholderView: (CGSize) imageSize if(self.processedImages.count > 0) { NSNumber* placeholderAsset = BCCastAsClass(NSNumber, [self.processedImages objectAtIndex: 0]); - + if(placeholderAsset) { // Ensure that the # of assets we have are reflected in the scrollview size. 
- + self.scrollView.contentSize = [self pContentSizeForScrollView]; // Increment the visible image indices. - + for(BCImageView* view in self.visibleImageViews) { [view setIndex: view.index + 1]; } - + // Start the capture sound. - + [self pPlayPhotoCaptureSound]; // Create the new placeholder view, add it to the scrollview and animate it in. - + BCImageView* imageView = [[BCImageView alloc] initWithFrame: BCViewFrame]; - + // Only set the natural size if the photo wasn't captured. - + if(self.imageProcessor.wasCaptured == NO) { imageView.naturalSize = imageSize; } - + [self pConfigureView: imageView forIndex: 0]; [imageView useAsset: placeholderAsset]; - + CGRect initialFrame = imageView.frame; imageView.frame = CGRectOffset(initialFrame, -(initialFrame.size.width + PADDING + PADDING), 0); - + [self.scrollView addSubview: imageView]; [self.visibleImageViews addObject: imageView]; - + // Since we add a placeholder to the beginning of the array we need // to update the array indices appropriately and offset the visible image views - - [UIView animateWithDuration: 2.0 - delay: 0.0 - options: UIViewAnimationOptionCurveEaseInOut + + [UIView animateWithDuration: 2.0 + delay: 0.0 + options: UIViewAnimationOptionCurveEaseInOut animations: ^ { for(BCImageView* view in self.visibleImageViews) @@ -658,8 +658,8 @@ - (void) pHideToolbar: (BOOL) animate } else { - [UIView animateWithDuration: 0.4 - delay: 0.1 + [UIView animateWithDuration: 0.4 + delay: 0.1 options: UIViewAnimationOptionCurveEaseOut animations:^ { @@ -682,8 +682,8 @@ - (void) pShowToolbar: (BOOL) animate } else { - [UIView animateWithDuration: 0.4 - delay: 0.1 + [UIView animateWithDuration: 0.4 + delay: 0.1 options: UIViewAnimationOptionCurveEaseIn animations:^ { @@ -716,7 +716,7 @@ - (void) pValidateToolbarItems BOOL optionsEnabled = YES; BOOL pickPhotoEnabled = YES; BOOL capturePhotoEnabled = YES; - + if(self.animatingImage || self.processingImage) { actionEnabled = NO; @@ -724,12 +724,12 @@ - (void) pValidateToolbarItems pickPhotoEnabled = NO; capturePhotoEnabled = NO; } - + if(self.writingAsset || self.processedImages.count == 0) { actionEnabled = NO; } - + for(UIBarButtonItem* item in self.toolbar.items) { if(item.tag == CPActionBarButtonItemTag) @@ -742,11 +742,11 @@ - (void) pValidateToolbarItems } else if(item.tag == CPCapturePhotoBarButtonItemTag) { - item.enabled = capturePhotoEnabled; + item.enabled = capturePhotoEnabled; } else if(item.tag == CPPickPhotoBarButtonItemTag) { - item.enabled = pickPhotoEnabled; + item.enabled = pickPhotoEnabled; } } } @@ -754,48 +754,48 @@ - (void) pValidateToolbarItems - (NSMutableDictionary*) pGPSDictionary: (CLLocation*) location { NSMutableDictionary* gpsDict = [[NSMutableDictionary alloc] init]; - + if(location != nil) { CLLocationDegrees exifLatitude = location.coordinate.latitude; CLLocationDegrees exifLongitude = location.coordinate.longitude; - + [gpsDict setObject: location.timestamp forKey: (NSString*)kCGImagePropertyGPSTimeStamp]; - - if(exifLatitude < 0.0) + + if(exifLatitude < 0.0) { exifLatitude = exifLatitude*(-1); [gpsDict setObject: @"S" forKey: (NSString*)kCGImagePropertyGPSLatitudeRef]; - } + } else { [gpsDict setObject: @"N" forKey: (NSString*)kCGImagePropertyGPSLatitudeRef]; } - + [gpsDict setObject: [NSNumber numberWithFloat: exifLatitude] forKey: (NSString*)kCGImagePropertyGPSLatitude]; - + if(exifLongitude < 0.0) { exifLongitude = exifLongitude*(-1); [gpsDict setObject: @"W" forKey: (NSString*)kCGImagePropertyGPSLongitudeRef]; - } + } else { [gpsDict setObject: @"E" 
forKey: (NSString*)kCGImagePropertyGPSLongitudeRef]; } - + [gpsDict setObject: [NSNumber numberWithFloat: exifLongitude] forKey: (NSString*)kCGImagePropertyGPSLongitude]; } - + return gpsDict; } -- (NSMutableDictionary*) pCurrentLocation +- (NSMutableDictionary*) pCurrentLocation { return [self pGPSDictionary: self.currentLocation]; } -- (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage +- (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage metadata: (NSDictionary*) metadata gpsData: (NSDictionary*) gpsData writingOriginal: (BOOL) writingOriginal @@ -806,45 +806,45 @@ - (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage BCTimer* timer = [BCTimer timer]; [timer startTimer]; #endif - + ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init]; //AppDelegate().assetLibrary; BOOL canWriteToAssetLibrary = NO; - + // We can get into a situation where the user is asked if our app can write to the photo library (ios6) // If they choose no, then we weill get a not-authorized status and have to bail. // We probably need UI here to tell the user they disable write access. - + PHAuthorizationStatus status = [PHPhotoLibrary authorizationStatus]; if(status == PHAuthorizationStatusAuthorized || status == PHAuthorizationStatusNotDetermined) { canWriteToAssetLibrary = YES; } - + if(canWriteToAssetLibrary) { NSMutableDictionary* imageMetadata = [metadata mutableCopy]; UIBackgroundTaskIdentifier backgroundIdent = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}]; - + if(!writingOriginal) { // We always generate the processed asset as having an orientation of zero. - + NSString* orientationProperty = (__bridge NSString*)kCGImagePropertyOrientation; [imageMetadata setObject: [NSNumber numberWithInt: 0] forKey: orientationProperty]; } - - + + // if it doesn't have GPS (captured photos), insert the current location if (![imageMetadata objectForKey:(NSString*)kCGImagePropertyGPSDictionary]) { NSDictionary* gpsDict = gpsData ? gpsData : [self pCurrentLocation]; - + if(gpsDict.count > 0) { [imageMetadata setObject: gpsDict forKey: (NSString*)kCGImagePropertyGPSDictionary]; } } - - + + [library writeImageToSavedPhotosAlbum: cgImage metadata: imageMetadata completionBlock:^(NSURL* asset, NSError* error) @@ -857,7 +857,7 @@ - (void) pWriteCGImageToSavedPhotosAlbum: (CGImageRef) cgImage { writeCompletionBlock(asset, error); } - + [[UIApplication sharedApplication] endBackgroundTask: backgroundIdent]; }]; } @@ -868,10 +868,10 @@ - (void) pGatherOriginalLocation: (NSURL*) assetURL andWriteToPhotoLibrary: (BCI NSLog(@"pGatherOriginalLocation"); ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init]; __block NSMutableDictionary* gpsDict = nil; - + // Gather the EXIF data if we can... - - [library assetForURL: assetURL + + [library assetForURL: assetURL resultBlock:^(ALAsset *asset) { // NSLog(@" - got result"); @@ -881,12 +881,12 @@ - (void) pGatherOriginalLocation: (NSURL*) assetURL andWriteToPhotoLibrary: (BCI { imageMetadata = [rep metadata]; } - + gpsDict = [self pGPSDictionary: BCCastAsClass(CLLocation, [asset valueForProperty: ALAssetPropertyLocation])]; [self pWriteImageToPhotoLibrary: image metadata: imageMetadata gpsData: gpsDict]; - } + } failureBlock:^(NSError *error) - + { NSLog(@" - Error getting asset %@ -attempting to write anyway", error); // Error getting asset, but attempt to write anyways. 
@@ -899,9 +899,9 @@ - (void) pWriteImageToPhotoLibrary: (BCImage*) image metadata: (NSDictionary*) m NSLog(@"pWriteImageToPhotoLibrary"); self.writingAsset = YES; [self pValidateToolbarItems]; - + __block CGImageRef imageRefToWrite = image.CGImageRef; - + [self pWriteCGImageToSavedPhotosAlbum: imageRefToWrite metadata: metadata gpsData: gpsData @@ -909,21 +909,21 @@ - (void) pWriteImageToPhotoLibrary: (BCImage*) image metadata: (NSDictionary*) m completionBlock:^(NSURL* asset, NSError* error) { self.writingAsset = NO; - + if(error) { -#if DEBUG +#if DEBUG NSLog(@"%@", error.description); -#endif +#endif } else if (!asset) { NSLog(@"Missing asset! %@ %@", image, metadata); } else { - // We are always tacking onto the beginning of the image list. We stash a placeholder object at that index until we + // We are always tacking onto the beginning of the image list. We stash a placeholder object at that index until we // are finished writing to the photo library and obtaining the asset URL. - + if(self.processedImages.count > 0) { [self.processedImages replaceObjectAtIndex: 0 withObject: asset]; @@ -931,7 +931,7 @@ - (void) pWriteImageToPhotoLibrary: (BCImage*) image metadata: (NSDictionary*) m } [self pValidateToolbarItems]; - + CGImageRelease(imageRefToWrite); }]; } @@ -939,19 +939,19 @@ - (void) pWriteImageToPhotoLibrary: (BCImage*) image metadata: (NSDictionary*) m - (void) pImageProcessorDone: (CPImageProcessor*) imageProcessor { NSLog(@"[CPV] pImageProcessorDone %i %i", (imageProcessor == self.imageProcessor), self.processingImage); - + #if DEBUG assert([NSThread isMainThread]); #endif - + [imageProcessor removeObserver: self forKeyPath: @"isFinished"]; - + if(imageProcessor == self.imageProcessor && self.processingImage) { BCImage* image = imageProcessor.processedImage; NSLog(@" - processedImage %@ %@", image, NSStringFromCGSize(image.size)); BCImageView* imageView = nil; - + if([self pIsDisplayingViewForIndex: 0 visibleView: &imageView]) { [imageView useAsset: image]; @@ -969,17 +969,17 @@ - (void) pImageProcessorDone: (CPImageProcessor*) imageProcessor NSLog(@" - gathering original location"); [self pGatherOriginalLocation: imageProcessor.assetURL andWriteToPhotoLibrary: image]; } - + if(writeOriginal) { NSLog(@" - writeOriginal"); - [self pWriteCGImageToSavedPhotosAlbum: imageProcessor.imageToProcess.CGImage - metadata: imageProcessor.imageMetadata - gpsData: nil + [self pWriteCGImageToSavedPhotosAlbum: imageProcessor.imageToProcess.CGImage + metadata: imageProcessor.imageMetadata + gpsData: nil writingOriginal: YES completionBlock: NULL]; } - + self.imageProcessor = nil; self.processingImage = NO; } @@ -993,49 +993,49 @@ - (void) pBeginProcessingPhoto: (CGSize) imageSize NSLog(@"[CPV] pBeginProcessingPhoto %@", NSStringFromCGSize(imageSize)); if(self.processingImage == NO && self.photoToProcess) { - + [self clearBackgroundImage]; CGFloat scale = 0.5; - + if([[NSUserDefaults standardUserDefaults] boolForKey: CPFullSizeImageOptionKey]) { scale = 1.0; } - - self.imageProcessor = [[CPImageProcessor alloc] initWithImage: self.photoToProcess - metadata: self.photoMetadata + + self.imageProcessor = [[CPImageProcessor alloc] initWithImage: self.photoToProcess + metadata: self.photoMetadata assetLibraryURL: self.photoAssetLibraryURL - scale: scale - cropRect: CGRectZero + scale: scale + cropRect: CGRectZero wasCaptured: self.photoWasCaptured]; - + self.imageProcessor.queuePriority = NSOperationQueuePriorityVeryHigh; self.imageProcessor.curvesPath = [self pCurvesPathFromUserSetting]; - + // 
These are temporary instance variables - + self.photoToProcess = nil; self.photoMetadata = nil; self.photoAssetLibraryURL = nil; self.photoWasCaptured = NO; - + if(!self.imageQueue) { self.imageQueue = [[NSOperationQueue alloc] init]; } - + [self.imageProcessor addObserver: self forKeyPath: @"isFinished" options: 0 context: &self->_imageProcessor]; self.processingImage = YES; [self.imageQueue addOperation: self.imageProcessor]; - + // Start by adding an empty slot for the 0-th image and then update the scroll view content/metrics - + NSInteger placeholderAsset = [self pPlaceholderTypeFromCurveName: self.imageProcessor.curvesPath]; if(self.imageProcessor.portraitOrientation == NO) { placeholderAsset *= -1; } - + [self.processedImages insertObject: [NSNumber numberWithInteger: placeholderAsset] atIndex: 0]; [self pCreateAndAnimatePlaceholderView: imageSize]; } @@ -1052,7 +1052,7 @@ - (void) pHandleSingleTap: (UITapGestureRecognizer*) sender NSInteger lastNeededViewIndex = floorf((CGRectGetMaxX(visibleBounds)-1) / CGRectGetWidth(visibleBounds)); firstNeededViewIndex = MAX(firstNeededViewIndex, 0); lastNeededViewIndex = MIN(lastNeededViewIndex, self.processedImages.count - 1); - + // NSLog(@"firstNeededViewIndex = %d, lastNeededViewIndex = %d, processedImageAsset = %@", firstNeededViewIndex, lastNeededViewIndex, self.processedImages.count > 0 ? [self.processedImages objectAtIndex: firstNeededViewIndex] : nil); #endif @@ -1062,12 +1062,12 @@ - (void) pHandleSingleTap: (UITapGestureRecognizer*) sender - (BOOL) gestureRecognizer: (UIGestureRecognizer*) gestureRecognizer shouldReceiveTouch: (UITouch*) touch { BOOL shouldReceive = YES; - + if([touch.view isKindOfClass:[UIControl class]]) { return NO; } - + return shouldReceive; } @@ -1075,11 +1075,11 @@ - (void) pHandleWelcomeTap: (UITapGestureRecognizer*) sender { UIView* rootView = [sender.view superview]; UIView* welcomeView = [rootView viewWithTag: CPWelcomeViewTag]; - + [UIView transitionFromView: welcomeView - toView: self.view + toView: self.view duration: 1.0 - options: UIViewAnimationOptionShowHideTransitionViews | UIViewAnimationOptionTransitionCrossDissolve + options: UIViewAnimationOptionShowHideTransitionViews | UIViewAnimationOptionTransitionCrossDissolve completion: ^(BOOL finished) { [welcomeView removeFromSuperview]; @@ -1093,7 +1093,7 @@ - (void) pSetupPhotoCaptureSound { OSStatus status = kAudioServicesNoError; SystemSoundID soundID = 0; - + status = AudioServicesCreateSystemSoundID((__bridge CFURLRef)[NSURL fileURLWithPath: [[NSBundle mainBundle] pathForResource:@"CameraNoise" ofType:@"aif"]], &soundID); if(status == kAudioServicesNoError) { @@ -1115,7 +1115,7 @@ - (CPPlaceholderType) pPlaceholderTypeFromCurveName: (NSString*) curvePath { NSString* curveName = [[curvePath lastPathComponent] stringByDeletingPathExtension]; CPPlaceholderType type = CPPlaceholderBasic; - + if([curveName isEqualToString: @"red"]) { type = CPPlaceholderRed; @@ -1140,12 +1140,12 @@ - (CPPlaceholderType) pPlaceholderTypeFromCurveName: (NSString*) curvePath { type = CPPlaceholderNegative; } - + if([[NSUserDefaults standardUserDefaults] boolForKey: CPWantsBorderOptionKey]) { type |= CPPlaceholderBorder; } - + return type; } @@ -1154,7 +1154,7 @@ - (NSString*) pCurvesPathFromUserSetting NSString* path = nil; NSMutableArray* choices = [NSMutableArray arrayWithCapacity: 5]; NSUserDefaults* defaults = [NSUserDefaults standardUserDefaults]; - + if([defaults boolForKey: CPRedProcessingOptionKey]) { [choices addObject: @"red"]; @@ -1179,12 +1179,12 @@ - 
(NSString*) pCurvesPathFromUserSetting { [choices addObject: @"extreme"]; } - + if(choices.count == 0) { [choices addObject: @"negative"]; } - + NSInteger choiceIndex = lrand48() % choices.count; path = [[NSBundle mainBundle] pathForResource: [choices objectAtIndex: choiceIndex] ofType: @"acv"]; return path; @@ -1193,12 +1193,12 @@ - (NSString*) pCurvesPathFromUserSetting - (BOOL) pWriteOriginalImage { BOOL shouldWrite = NO; - + if(self.imageProcessor.wasCaptured && [[NSUserDefaults standardUserDefaults] boolForKey: CPKeepOriginalOptionKey]) { shouldWrite = YES; } - + return shouldWrite; } @@ -1206,7 +1206,7 @@ - (BOOL) pWriteOriginalImage - (NSString*) pFileExtensionForUTI: (NSString*) uti { NSString* extension = @""; - + if([uti isEqualToString: @"public.jpeg"]) { extension = @"jpeg"; @@ -1215,14 +1215,14 @@ - (NSString*) pFileExtensionForUTI: (NSString*) uti { extension = @"png"; } - + return extension; } - (NSString*) pMimeTypeForUTI: (NSString*) uti { NSString* mimeType = @""; - + if([uti isEqualToString: @"public.jpeg"]) { mimeType = @"image/jpeg"; @@ -1231,7 +1231,7 @@ - (NSString*) pMimeTypeForUTI: (NSString*) uti { mimeType = @"image/png"; } - + return mimeType; } @@ -1240,50 +1240,50 @@ - (NSString*) pMimeTypeForUTI: (NSString*) uti - (void) pLoadAsset: (NSURL*) assetURL usingDataCompletionBlock: (CPLoadAssetDataCompletionBlock) completionBlock { if(completionBlock) - { + { ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init]; //AppDelegate().assetLibrary; - - [library assetForURL: assetURL - resultBlock:^(ALAsset *asset) + + [library assetForURL: assetURL + resultBlock:^(ALAsset *asset) { assert([NSThread isMainThread]); - + ALAssetRepresentation* rep = [asset defaultRepresentation]; BOOL didFail = YES; - + if(rep) { unsigned long size = (unsigned long)[rep size]; uint8_t* buffer = (uint8_t*)malloc(size); - + if(buffer) { NSError* error = nil; NSUInteger numBytes = 0; numBytes = [rep getBytes: buffer fromOffset: 0 length: size error: &error]; - + if(numBytes > 0 && !error) { NSData* photoData = [[NSData alloc] initWithBytes: buffer length: size]; - + didFail = NO; completionBlock(photoData, [rep UTI], didFail); } - else if(error) + else if(error) { NSLog(@"ALAssetRepresentation -getBytes::: failed - %@", [error description]); } - + free(buffer); } } - + if(didFail) { completionBlock(nil, nil, YES); } - - } + + } failureBlock:^(NSError *error) { NSLog(@"%@", [error description]); @@ -1297,15 +1297,15 @@ - (void) pLoadAsset: (NSURL*) assetURL usingImageCompletionBlock: (CPLoadAssetIm if(completionBlock) { ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init]; // AppDelegate().assetLibrary; - - [library assetForURL: assetURL - resultBlock:^(ALAsset *asset) + + [library assetForURL: assetURL + resultBlock:^(ALAsset *asset) { assert([NSThread isMainThread]); - + ALAssetRepresentation* rep = [asset defaultRepresentation]; BOOL didFail = YES; - + if(rep) { UIImage* image = [UIImage imageWithCGImage: [rep fullResolutionImage]]; @@ -1315,13 +1315,13 @@ - (void) pLoadAsset: (NSURL*) assetURL usingImageCompletionBlock: (CPLoadAssetIm completionBlock(image, [rep UTI], didFail); } } - + if(didFail) { completionBlock(nil, nil, YES); } - - } + + } failureBlock:^(NSError *error) { NSLog(@"%@", [error description]); @@ -1338,7 +1338,7 @@ - (void) pAdjustScrollViewFrame CGRect frame = self.scrollView.frame; frame.origin.x -= PADDING; frame.size.width += (2 * PADDING); - + self.scrollView.frame = frame; } @@ -1356,13 +1356,13 @@ - (CGSize) pContentSizeForScrollView return 
CGSizeMake(bounds.size.width * self.processedImages.count, bounds.size.height); } -- (CGRect) pFrameForViewAtIndex: (NSInteger) index +- (CGRect) pFrameForViewAtIndex: (NSInteger) index { CGRect bounds = self.scrollView.bounds; CGRect viewFrame = bounds; viewFrame.size.width -= (2 * PADDING); viewFrame.origin.x = (bounds.size.width * index) + PADDING; - + return viewFrame; } @@ -1376,13 +1376,13 @@ - (void) pConfigureView: (UIView*) view forIndex: (NSInteger) index - (BOOL) pIsDisplayingViewForIndex: (NSUInteger) index visibleView: (BCImageView**) visibleView { BOOL foundView = NO; - + for(BCImageView* view in self.visibleImageViews) { if(view.index == index) { foundView = YES; - + if(visibleView) { *visibleView = view; @@ -1390,7 +1390,7 @@ - (BOOL) pIsDisplayingViewForIndex: (NSUInteger) index visibleView: (BCImageView break; } } - + return foundView; } @@ -1408,60 +1408,60 @@ - (void) pTileViews { /* CGSize contentSize = [self pContentSizeForScrollView]; - + if(!CGSizeEqualToSize(contentSize, self.scrollView.contentSize)) { self.scrollView.contentSize = contentSize; } */ - + CGRect visibleBounds = self.scrollView.bounds; NSInteger firstNeededViewIndex = floorf(CGRectGetMinX(visibleBounds) / CGRectGetWidth(visibleBounds)) - 1; NSInteger lastNeededViewIndex = floorf((CGRectGetMaxX(visibleBounds)-1) / CGRectGetWidth(visibleBounds)) + 1; firstNeededViewIndex = MAX(firstNeededViewIndex, 0); lastNeededViewIndex = MIN(lastNeededViewIndex, self.processedImages.count - 1); - - // Recycle no-longer-visible pages - + + // Recycle no-longer-visible pages + for(BCImageView* view in self.visibleImageViews) { - if(view.index < firstNeededViewIndex || view.index > lastNeededViewIndex) + if(view.index < firstNeededViewIndex || view.index > lastNeededViewIndex) { [self.recycledImageViews addObject: view]; [view clearContent]; [view removeFromSuperview]; } } - + [self.visibleImageViews minusSet: self.recycledImageViews]; // add missing pages - - for(NSInteger index = firstNeededViewIndex; index <= lastNeededViewIndex; index++) + + for(NSInteger index = firstNeededViewIndex; index <= lastNeededViewIndex; index++) { if(![self pIsDisplayingViewForIndex: index visibleView: NULL]) { id asset = [self.processedImages objectAtIndex: index]; - + // We want an empty slot if the processed images slot if a placeholder if(BCCastAsClass(NSNumber, asset) == nil) { BCImageView* imageView = [self pDequeueRecycledView]; - + if(imageView == nil) { imageView = [[BCImageView alloc] initWithFrame: BCViewFrame]; } - + [self pConfigureView: imageView forIndex: index]; [imageView useAsset: asset]; - + [self.scrollView addSubview: imageView]; [self.visibleImageViews addObject: imageView]; } } - } + } } - (NSURL*) pURLForVisibleImageView @@ -1476,7 +1476,7 @@ - (NSURL*) pURLForVisibleImageView { url = BCCastAsClass(NSURL, [self.processedImages objectAtIndex: firstNeededViewIndex]); } - + return url; } diff --git a/CrossProcess/ImageMetadata.h b/CrossProcess/ImageMetadata.h index 774ed29..1b1794c 100644 --- a/CrossProcess/ImageMetadata.h +++ b/CrossProcess/ImageMetadata.h @@ -3,7 +3,7 @@ // CrossProcess // // Created by Sean Hess on 8/3/16. -// Copyright © 2016 Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import diff --git a/CrossProcess/ImageMetadata.m b/CrossProcess/ImageMetadata.m index c830ac5..3fa70ee 100644 --- a/CrossProcess/ImageMetadata.m +++ b/CrossProcess/ImageMetadata.m @@ -3,7 +3,7 @@ // CrossProcess // // Created by Sean Hess on 8/3/16. -// Copyright © 2016 Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import "ImageMetadata.h" @@ -13,7 +13,7 @@ @implementation ImageMetadata +(void)fetchMetadataForURL:(NSURL*)url found:(void(^)(NSDictionary*))found { PHAsset * asset = [PHAsset fetchAssetsWithALAssetURLs:@[url] options:nil][0]; - + [ImageMetadata fetchMetadataForAsset:asset found:found]; } @@ -21,7 +21,7 @@ +(void)fetchMetadataForAsset:(PHAsset*)asset found:(void(^)(NSDictionary*))found PHContentEditingInputRequestOptions * options = [[PHContentEditingInputRequestOptions alloc] init]; options.networkAccessAllowed = true; [asset requestContentEditingInputWithOptions:options completionHandler:^(PHContentEditingInput * _Nullable contentEditingInput, NSDictionary * _Nonnull info) { - + NSURL * fullURL = [contentEditingInput fullSizeImageURL]; CIImage* fullImage = [CIImage imageWithContentsOfURL:fullURL]; NSDictionary * meta = [fullImage properties]; diff --git a/CrossProcess/UIImageAdditions.h b/CrossProcess/UIImageAdditions.h index 409718d..9dc950c 100644 --- a/CrossProcess/UIImageAdditions.h +++ b/CrossProcess/UIImageAdditions.h @@ -2,7 +2,7 @@ // UIImageAdditions.h // CrossProcess // -// Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import diff --git a/CrossProcess/UIImageAdditions.m b/CrossProcess/UIImageAdditions.m index 893fc09..4938db6 100644 --- a/CrossProcess/UIImageAdditions.m +++ b/CrossProcess/UIImageAdditions.m @@ -2,7 +2,7 @@ // UIImageAdditions.m // CrossProcess // -// Copyright Banana Camera Company 2010 - 2012. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. 
// #import "UIImageAdditions.h" @@ -17,16 +17,16 @@ @implementation UIImage(BananaCameraAdditions) - (void) createScaledImage: (CGSize) size atURL: (NSURL*) destinationURL { NSFileManager* fm = [NSFileManager defaultManager]; - + if([destinationURL isFileURL] && [fm fileExistsAtPath: [destinationURL path]] == NO) { UIGraphicsBeginImageContextWithOptions(size, NO, 1.0); - + CGContextRef context = UIGraphicsGetCurrentContext(); - + CGContextSetInterpolationQuality(context, kCGInterpolationHigh); [self drawInRect: CGRectMake(0.0f, 0.0f, size.width, size.height)]; - + UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext(); if(scaledImage) { @@ -39,9 +39,9 @@ - (void) createScaledImage: (CGSize) size atURL: (NSURL*) destinationURL NSLog(@"Unabled to write scaled image to %@", destinationURL); } } - + UIGraphicsEndImageContext(); - } + } } } @@ -61,52 +61,52 @@ - (id) init { id appDelegate = BCCastAsProtocol(BCAppDelegate, [[UIApplication sharedApplication] delegate]); NSURL* appSupportURL = [appDelegate appSupportURL]; - + if(appSupportURL) { _appSupportURL = appSupportURL; - _imageAssets = [NSDictionary dictionaryWithContentsOfFile: [[NSBundle mainBundle] pathForResource: @"image_assets" + _imageAssets = [NSDictionary dictionaryWithContentsOfFile: [[NSBundle mainBundle] pathForResource: @"image_assets" ofType: @"plist"]]; _imageAssetsNames = [appDelegate imageAssetsNames]; } } - + return self; } - (void) main { UIBackgroundTaskIdentifier backgroundIdent = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}]; - + NSUInteger assetsCount = self.imageAssetsNames.count; - + for(NSUInteger asset = 0; asset < assetsCount; ++asset) { NSString* assetName = [self.imageAssetsNames objectAtIndex: asset]; UIImage* image = [UIImage imageNamed: assetName]; - + if(image) { for(NSDictionary* level1Items in [self.imageAssets allValues]) { for(NSDictionary* level2Items in [level1Items allValues]) { - @autoreleasepool + @autoreleasepool { CGSize resultingSize = CGSizeFromString([level2Items objectForKey: @"image-size"]); NSURL* assetURL = [self.appSupportURL URLByAppendingPathComponent: [level2Items objectForKey: assetName]]; - + [image createScaledImage: resultingSize atURL: assetURL]; } } } } } - + #if DEBUG NSLog(@"Finished creating scaled images..."); #endif - + [[UIApplication sharedApplication] endBackgroundTask: backgroundIdent]; } diff --git a/CrossProcess/main.m b/CrossProcess/main.m index d25e244..ed27d2c 100644 --- a/CrossProcess/main.m +++ b/CrossProcess/main.m @@ -2,7 +2,7 @@ // main.m // CrossProcess // -// Copyright 2010-2013 Banana Camera Company. All rights reserved. +// Copyright 2019 Zinc Collective LLC. All rights reserved. // #import @@ -14,7 +14,7 @@ int main(int argc, char *argv[]) { - @autoreleasepool + @autoreleasepool { srand48(time(NULL)); diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..65e96ad --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,40 @@ +The Prosperity Public License 2.0.0 + +Contributor: Zinc Collective LLC + +Source Code: https://github.com/zinc-collective/mp-crossprocess + +This license lets you use and share this software for free, +with a trial-length time limit on commercial use. Specifically: + +If you follow the rules below, you may do everything with this +software that would otherwise infringe either the contributor's +copyright in it, any patent claim the contributor can license +that covers this software as of the contributor's latest +contribution, or both. + +1. 
You must limit use of this software in any manner primarily + intended for or directed toward commercial advantage or + private monetary compensation to a trial period of 32 + consecutive calendar days. This limit does not apply to use in + developing feedback, modifications, or extensions that you + contribute back to those giving this license. + +2. Ensure everyone who gets a copy of this software from you, in + source code or any other form, gets the text of this license + and the contributor and source code lines above. + +3. Do not make any legal claim against anyone for infringing any + patent claim they would infringe by using this software alone, + accusing this software, with or without changes, alone or as + part of a larger application. + +You are excused for unknowingly breaking rule 1 if you stop +doing anything requiring this license within 30 days of +learning you broke the rule. + +**This software comes as is, without any warranty at all. As far +as the law allows, the contributor will not be liable for any +damages related to this software or this license, for any kind of +legal claim.** +