A scrappy gimbal that insults you in Shakespearean English

iOS face-detection work, Gemini integration, and text-to-speech (TTS) support

+487 -152
+6
mobile/composeApp/build.gradle.kts
··· 46 46 implementation(libs.androidx.lifecycle.runtime.compose) 47 47 implementation(libs.camerak) 48 48 implementation(libs.androidx.graphics.shapes) 49 + implementation(libs.generativeai.google) 50 + implementation(libs.kotlinx.coroutines.core) 51 + implementation(libs.tts) 52 + 53 + // Optional: Extensions for Compose 54 + implementation(libs.tts.compose) 49 55 } 50 56 } 51 57 }
+6
mobile/composeApp/src/androidMain/AndroidManifest.xml
··· 28 28 <category android:name="android.intent.category.LAUNCHER" /> 29 29 </intent-filter> 30 30 </activity> 31 + 31 32 </application> 33 + <queries> 34 + <intent> 35 + <action android:name="android.intent.action.TTS_SERVICE" /> 36 + </intent> 37 + </queries> 32 38 33 39 </manifest>
+8 -9
mobile/composeApp/src/androidMain/kotlin/com/paytondeveloper/myrus_mobile/MLFace.android.kt
··· 35 35 import java.io.FileOutputStream 36 36 import java.io.IOException 37 37 38 - actual fun CameraController.getResolution(): Size? { 39 - val field = this::class.java.getDeclaredField("imageCapture") 40 - field.isAccessible = true 41 - val imgCapture = field.get(this) as ImageCapture? 42 - val res = imgCapture?.resolutionInfo?.resolution 43 - return if (res != null) Size(width = res.width.toFloat(), height = res.height.toFloat()) else null 44 - 45 - } 38 + //actual fun CameraController.getResolution(): Size? { 39 + // val field = this::class.java.getDeclaredField("imageCapture") 40 + // field.isAccessible = true 41 + // val imgCapture = field.get(this) as ImageCapture? 42 + // val res = imgCapture?.resolutionInfo?.resolution 43 + // return if (res != null) Size(width = res.width.toFloat(), height = res.height.toFloat()) else null 44 + // 45 + //} 46 46 47 47 actual fun analyzeImage(img: ByteArray, callback: (Rect, Size) -> Unit) { 48 48 // println("res2: ${saveByteArrayToMediaStore(AppInfo.app.applicationContext, img)}") ··· 61 61 AppInfo.canDetectFace = true 62 62 res.result.forEach { 63 63 println("FACE @ ${it.boundingBox.top}") 64 - 65 64 callback(Rect(it.boundingBox.top.toFloat(), it.boundingBox.left.toFloat(), it.boundingBox.bottom.toFloat(), it.boundingBox.right.toFloat()), Size(width = bitmap.width.toFloat(), height = bitmap.height.toFloat())) 66 65 } 67 66 }
+75
mobile/composeApp/src/androidMain/kotlin/com/paytondeveloper/myrus_mobile/MainActivity.kt
··· 1 1 package com.paytondeveloper.myrus_mobile 2 2 3 + import android.R.attr.bitmap 3 4 import android.app.Activity 4 5 import android.app.Application 6 + import android.graphics.Bitmap 5 7 import android.os.Bundle 8 + import android.speech.tts.TextToSpeech 9 + import android.speech.tts.UtteranceProgressListener 6 10 import androidx.activity.ComponentActivity 7 11 import androidx.activity.compose.setContent 8 12 import androidx.activity.enableEdgeToEdge 9 13 import androidx.compose.runtime.Composable 14 + import androidx.compose.ui.platform.LocalContext 15 + import androidx.compose.ui.platform.LocalView 10 16 import androidx.compose.ui.tooling.preview.Preview 17 + import androidx.core.graphics.applyCanvas 18 + import kotlinx.coroutines.suspendCancellableCoroutine 19 + import java.nio.ByteBuffer 20 + import kotlin.coroutines.resume 21 + import kotlin.coroutines.suspendCoroutine 22 + 11 23 12 24 class MainActivity : ComponentActivity() { 13 25 override fun onCreate(savedInstanceState: Bundle?) { ··· 18 30 setContent { 19 31 App() 20 32 } 33 + } 34 + } 35 + 36 + actual fun epochMillis() = System.currentTimeMillis() 37 + 38 + actual suspend fun sayText(text: String) { 39 + return suspendCancellableCoroutine { continuation -> 40 + var tts: TextToSpeech? = null 41 + 42 + tts = TextToSpeech(AppInfo.app.applicationContext) { status -> 43 + println("init'd with status: $status") 44 + 45 + if (status == TextToSpeech.SUCCESS) { 46 + // Now the TTS instance is initialized and can be used 47 + tts?.let { ttsInstance -> 48 + val result = ttsInstance.speak(text, TextToSpeech.QUEUE_FLUSH, null, "utteranceId") 49 + 50 + if (result == TextToSpeech.SUCCESS) { 51 + ttsInstance.setOnUtteranceProgressListener(object : UtteranceProgressListener() { 52 + override fun onStart(utteranceId: String?) {} 53 + 54 + override fun onDone(utteranceId: String?) { 55 + continuation.resume(Unit) 56 + ttsInstance.shutdown() 57 + } 58 + 59 + override fun onError(utteranceId: String?) 
{ 60 + continuation.resume(Unit) 61 + ttsInstance.shutdown() 62 + } 63 + }) 64 + } else { 65 + continuation.resume(Unit) 66 + ttsInstance.shutdown() 67 + } 68 + } 69 + } else { 70 + continuation.resume(Unit) 71 + tts?.shutdown() 72 + } 73 + } 74 + 75 + continuation.invokeOnCancellation { 76 + tts?.stop() 77 + tts?.shutdown() 78 + } 79 + } 80 + } 81 + 82 + @Composable 83 + fun CaptureView(): ByteArray { 84 + val view = LocalView.current 85 + val context = LocalContext.current 86 + val bmp = Bitmap.createBitmap(view.width, view.height, 87 + Bitmap.Config.ARGB_8888).applyCanvas { 88 + view.draw(this) 89 + } 90 + bmp.let { 91 + val lnth: Int = bmp.getByteCount() 92 + val dst = ByteBuffer.allocate(lnth) 93 + bmp.copyPixelsToBuffer(dst) 94 + val barray = dst.array() 95 + return barray 21 96 } 22 97 } 23 98
+65 -40
mobile/composeApp/src/commonMain/kotlin/com/paytondeveloper/myrus_mobile/App.kt
··· 7 7 import androidx.compose.foundation.layout.fillMaxSize 8 8 import androidx.compose.foundation.layout.fillMaxWidth 9 9 import androidx.compose.foundation.layout.offset 10 + import androidx.compose.foundation.layout.padding 10 11 import androidx.compose.material.Button 11 12 import androidx.compose.material.MaterialTheme 13 + import androidx.compose.material.Slider 12 14 import androidx.compose.material.Text 13 15 import androidx.compose.runtime.* 14 16 import androidx.compose.ui.Alignment ··· 30 32 import com.kashif.cameraK.permissions.providePermissions 31 33 import com.kashif.cameraK.result.ImageCaptureResult 32 34 import com.kashif.cameraK.ui.CameraPreview 35 + import dev.shreyaspatil.ai.client.generativeai.GenerativeModel 36 + import dev.shreyaspatil.ai.client.generativeai.type.content 33 37 import io.ktor.util.Identity.encode 38 + import kotlinx.coroutines.CoroutineScope 39 + import kotlinx.coroutines.Dispatchers 40 + import kotlinx.coroutines.IO 34 41 import kotlinx.coroutines.delay 42 + import kotlinx.coroutines.launch 35 43 import org.jetbrains.compose.resources.painterResource 36 44 import org.jetbrains.compose.ui.tooling.preview.Preview 37 45 38 46 import myrus_mobile.composeapp.generated.resources.Res 39 47 import myrus_mobile.composeapp.generated.resources.compose_multiplatform 48 + import nl.marc_apps.tts.TextToSpeechEngine 49 + import nl.marc_apps.tts.rememberTextToSpeechOrNull 40 50 41 51 expect fun analyzeImage(img: ByteArray, callback: (Rect, Size) -> Unit) 42 52 ··· 44 54 data class Rect(val top: Float, val left: Float, val bottom: Float, val right: Float) 45 55 data class FaceData(val boundingBox: Rect) 46 56 57 + expect suspend fun sayText(text: String) 47 58 48 - expect fun CameraController.getResolution(): Size? 
59 + val genAI = GenerativeModel( 60 + "gemini-2.0-flash", 61 + apiKey = "AIzaSyCy56R6_T3Neu54W45MMSTGpXFEb92V2yI" 62 + ) 63 + 64 + expect fun epochMillis(): Long 65 + 49 66 50 67 @Composable 51 68 @Preview ··· 67 84 var camController by remember { mutableStateOf<CameraController?>(null) } 68 85 var camSize by remember { mutableStateOf<Size?>(null) } 69 86 var currentThingy by remember { mutableStateOf<Rect?>(Rect(0f,0f,0f,0f)) } 87 + var delayMillis by remember { mutableStateOf(1000) } 88 + var analyzing by remember { mutableStateOf(true) } 89 + val tts = rememberTextToSpeechOrNull(TextToSpeechEngine.Google) 70 90 LaunchedEffect(Unit) { 71 91 //not proud of this. 92 + suspend fun roast(image: ByteArray) { 93 + var content = content { 94 + image(image) 95 + text("make a shakespearean insult for the person in the middle of the image. return only the insult. be specific to the person in the image") 96 + } 97 + val res = genAI.generateContent(content) 98 + println("RES: ${res.text} TTS: ${tts}") 99 + // tts?.let { tts -> 100 + // tts.say(res.text ?: "uh oh its broken", true) 101 + // } 102 + sayText(res.text ?: "uh oh its borken") 103 + analyzing = true 104 + } 72 105 suspend fun runloop() { 73 106 74 - val res = camController?.takePicture() 75 - res?.let { 76 - when (it) { 77 - is ImageCaptureResult.Error -> { 78 - println("error taking pic. skipping frame: ${it.exception}") 79 - } 80 - is ImageCaptureResult.Success -> { 81 - analyzeImage(it.byteArray, { it, size -> 107 + if (analyzing) { 108 + 109 + val res = camController?.takePicture() 110 + res?.let { 111 + when (it) { 112 + is ImageCaptureResult.Error -> { 113 + println("error taking pic. 
skipping frame: ${it.exception}") 114 + } 115 + is ImageCaptureResult.Success -> { 116 + analyzeImage(it.byteArray, { bounds, size -> 82 117 // println("offset: ${it.top} ${it.left}") 83 - val factorY = it.top.toFloat() / size.height.toFloat() 84 - val factorX = it.left.toFloat() / size.width.toFloat() 118 + val factorY = bounds.top / size.height 119 + val factorX = bounds.left / size.width 85 120 86 - val newY = factorY * camSize!!.height.toFloat() 87 - val newX = factorX * camSize!!.width.toFloat() 121 + val newY = factorY * camSize!!.height 122 + val newX = factorX * camSize!!.width 88 123 89 - currentThingy = it.copy(top = newY, left = newX) 90 - }) 124 + currentThingy = bounds.copy(top = newY, left = newX) 125 + analyzing = false 126 + CoroutineScope(Dispatchers.IO).launch { 127 + roast(it.byteArray) 128 + } 129 + }) 130 + 131 + } 91 132 } 92 133 } 93 134 } 94 - delay(1000) 135 + delay(delayMillis.toLong()) 95 136 runloop() 96 137 } 138 + 97 139 runloop() 98 140 } 99 - Box(modifier = Modifier/*.drawWithCache { 100 - if (currentThingy != null) { 101 - val roundedPolygon = RoundedPolygon( 102 - numVertices = 6, 103 - radius = (currentThingy!!.right - currentThingy!!.left).toFloat(), 104 - centerX = currentThingy!!.left.toFloat(), 105 - centerY = currentThingy!!.top.toFloat() 106 - ) 107 - val roundedPolygonPath = roundedPolygon 108 - onDrawBehind { 109 - // drawPath(roundedPolygonPath, color = Color.Blue) 110 - drawRect( 111 - color = Color.Red, 112 - topLeft = Offset(currentThingy!!.left.toFloat(), currentThingy!!.top.toFloat()), 113 - size = androidx.compose.ui.geometry.Size(width = (currentThingy!!.right - currentThingy!!.left).toFloat(), height = (currentThingy!!.bottom - currentThingy!!.top).toFloat()) 114 - ) 115 - } 116 - 117 - } else { 118 - onDrawBehind { } 119 - } 120 - }*/) { 141 + Box(modifier = Modifier) { 121 142 val topPx = with(LocalDensity.current) { 122 143 currentThingy!!.top.toDp() 123 144 } ··· 136 157 setCameraLens(CameraLens.FRONT) 137 158 
setImageFormat(ImageFormat.PNG) 138 159 setDirectory(Directory.PICTURES) 139 - 140 - 141 160 }, onCameraControllerReady = { 142 161 camController = it 162 + if (getPlatform().name.contains("iOS")) { 163 + camController!!.toggleCameraLens() 164 + } 143 165 }) 144 166 Text("Face", modifier = Modifier.offset(x = leftPx, y = topPx)) 145 167 } 168 + Slider(modifier = Modifier.padding(top = 64.dp), value = delayMillis.toFloat(), onValueChange = { 169 + delayMillis = it.toInt() 170 + }, valueRange = 16.67f..5000f) 146 171 } else { 147 172 Text("no permissions!! can't do anything :(") 148 173 }
+21 -2
mobile/composeApp/src/iosMain/kotlin/com/paytondeveloper/myrus_mobile/MLFace.ios.kt
··· 2 2 3 3 import com.kashif.cameraK.builder.CameraControllerBuilder 4 4 import com.kashif.cameraK.controller.CameraController 5 + import kotlinx.cinterop.BetaInteropApi 6 + import kotlinx.cinterop.ExperimentalForeignApi 7 + import kotlinx.cinterop.memScoped 8 + import kotlinx.cinterop.toCValues 9 + import platform.Foundation.NSData 10 + import platform.Foundation.create 5 11 6 - actual fun analyzeImage(img: ByteArray, callback:(Rect, Size) -> Unit) {} 12 + object NativeAnalyzer { 13 + lateinit var analyzeImageNative: (img: ByteArray, callback: (Rect, Size) -> Unit) -> Unit 14 + @OptIn(ExperimentalForeignApi::class, BetaInteropApi::class) 15 + fun byteArrayToData(byteArray: ByteArray): NSData = memScoped { 16 + return NSData.create( 17 + bytes = byteArray.toCValues().getPointer(this), 18 + length = byteArray.size.toULong() 19 + ) 20 + } 21 + } 22 + 23 + actual fun analyzeImage(img: ByteArray, callback:(Rect, Size) -> Unit) { 24 + NativeAnalyzer.analyzeImageNative(img, callback) 25 + } 26 + 7 27 8 - actual fun CameraController.getResolution(): Size? {return null}
+22 -1
mobile/composeApp/src/iosMain/kotlin/com/paytondeveloper/myrus_mobile/MainViewController.kt
··· 1 1 package com.paytondeveloper.myrus_mobile 2 2 3 + import androidx.compose.runtime.Composable 4 + import androidx.compose.ui.interop.LocalUIViewController 3 5 import androidx.compose.ui.window.ComposeUIViewController 6 + import kotlinx.cinterop.ExperimentalForeignApi 7 + import kotlinx.cinterop.addressOf 8 + import kotlinx.cinterop.usePinned 9 + import platform.Foundation.NSData 10 + import platform.Foundation.NSDate 11 + import platform.Foundation.timeIntervalSince1970 12 + import platform.UIKit.UIGraphicsImageRenderer 13 + import platform.posix.memcpy 14 + 15 + fun MainViewController() = ComposeUIViewController { App() } 4 16 5 - fun MainViewController() = ComposeUIViewController { App() } 17 + actual fun epochMillis(): Long = (NSDate().timeIntervalSince1970 * 1000).toLong() 18 + 19 + @OptIn(ExperimentalForeignApi::class) 20 + fun NSDataToByteArray(data: NSData): ByteArray = ByteArray(data.length.toInt()).apply { 21 + usePinned { 22 + memcpy(it.addressOf(0), data.bytes, data.length) 23 + } 24 + } 25 + 26 + actual suspend fun sayText(text: String) {}
+13
mobile/gradle/libs.versions.toml
··· 14 14 camerak = "0.0.10" 15 15 compose-multiplatform = "1.7.0" 16 16 faceDetection = "16.1.7" 17 + generativeai = "0.9.0" 18 + generativeaiGoogle = "0.9.0-1.0.1" 17 19 graphicsShapes = "1.0.1" 18 20 junit = "4.13.2" 19 21 kotlin = "2.1.0" 22 + kotlinxCoroutinesCore = "1.9.0" 23 + ktorClientCio = "2.3.2" 24 + openaiClient = "4.0.1" 25 + ttsCompose = "2.5.0" 20 26 21 27 [libraries] 22 28 androidx-graphics-shapes = { module = "androidx.graphics:graphics-shapes", version.ref = "graphicsShapes" } 23 29 camerak = { module = "io.github.kashif-mehmood-km:camerak", version.ref = "camerak" } 24 30 face-detection = { module = "com.google.mlkit:face-detection", version.ref = "faceDetection" } 31 + generativeai = { module = "com.google.ai.client.generativeai:generativeai", version.ref = "generativeai" } 32 + generativeai-google = { module = "dev.shreyaspatil.generativeai:generativeai-google", version.ref = "generativeaiGoogle" } 25 33 kotlin-test = { module = "org.jetbrains.kotlin:kotlin-test", version.ref = "kotlin" } 26 34 kotlin-test-junit = { module = "org.jetbrains.kotlin:kotlin-test-junit", version.ref = "kotlin" } 27 35 junit = { group = "junit", name = "junit", version.ref = "junit" } ··· 34 42 androidx-activity-compose = { module = "androidx.activity:activity-compose", version.ref = "androidx-activityCompose" } 35 43 androidx-lifecycle-viewmodel = { group = "org.jetbrains.androidx.lifecycle", name = "lifecycle-viewmodel", version.ref = "androidx-lifecycle" } 36 44 androidx-lifecycle-runtime-compose = { group = "org.jetbrains.androidx.lifecycle", name = "lifecycle-runtime-compose", version.ref = "androidx-lifecycle" } 45 + kotlinx-coroutines-core = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref = "kotlinxCoroutinesCore" } 46 + ktor-client-cio = { module = "io.ktor:ktor-client-cio", version.ref = "ktorClientCio" } 47 + openai-client = { module = "com.aallam.openai:openai-client", version.ref = "openaiClient" } 48 + tts = { module = 
"nl.marc-apps:tts", version.ref = "ttsCompose" } 49 + tts-compose = { module = "nl.marc-apps:tts-compose", version.ref = "ttsCompose" } 37 50 38 51 [plugins] 39 52 androidApplication = { id = "com.android.application", version.ref = "agp" }
+1
mobile/iosApp/.gitignore
··· 1 + Pods/
+10
mobile/iosApp/Podfile
··· 1 + # Uncomment the next line to define a global platform for your project 2 + # platform :ios, '16.0' 3 + 4 + target 'iosApp' do 5 + # Comment the next line if you don't want to use dynamic frameworks 6 + use_frameworks! 7 + 8 + # Pods for iosApp 9 + pod 'GoogleMLKit/FaceDetection', '7.0.0' 10 + end
+81
mobile/iosApp/Podfile.lock
··· 1 + PODS: 2 + - GoogleDataTransport (10.1.0): 3 + - nanopb (~> 3.30910.0) 4 + - PromisesObjC (~> 2.4) 5 + - GoogleMLKit/FaceDetection (7.0.0): 6 + - GoogleMLKit/MLKitCore 7 + - MLKitFaceDetection (~> 6.0.0) 8 + - GoogleMLKit/MLKitCore (7.0.0): 9 + - MLKitCommon (~> 12.0.0) 10 + - GoogleToolboxForMac/Defines (4.2.1) 11 + - GoogleToolboxForMac/Logger (4.2.1): 12 + - GoogleToolboxForMac/Defines (= 4.2.1) 13 + - "GoogleToolboxForMac/NSData+zlib (4.2.1)": 14 + - GoogleToolboxForMac/Defines (= 4.2.1) 15 + - GoogleUtilities/Environment (8.0.2): 16 + - GoogleUtilities/Privacy 17 + - GoogleUtilities/Logger (8.0.2): 18 + - GoogleUtilities/Environment 19 + - GoogleUtilities/Privacy 20 + - GoogleUtilities/Privacy (8.0.2) 21 + - GoogleUtilities/UserDefaults (8.0.2): 22 + - GoogleUtilities/Logger 23 + - GoogleUtilities/Privacy 24 + - GTMSessionFetcher/Core (3.5.0) 25 + - MLImage (1.0.0-beta6) 26 + - MLKitCommon (12.0.0): 27 + - GoogleDataTransport (~> 10.0) 28 + - GoogleToolboxForMac/Logger (< 5.0, >= 4.2.1) 29 + - "GoogleToolboxForMac/NSData+zlib (< 5.0, >= 4.2.1)" 30 + - GoogleUtilities/Logger (~> 8.0) 31 + - GoogleUtilities/UserDefaults (~> 8.0) 32 + - GTMSessionFetcher/Core (< 4.0, >= 3.3.2) 33 + - MLKitFaceDetection (6.0.0): 34 + - MLKitCommon (~> 12.0) 35 + - MLKitVision (~> 8.0) 36 + - MLKitVision (8.0.0): 37 + - GoogleToolboxForMac/Logger (< 5.0, >= 4.2.1) 38 + - "GoogleToolboxForMac/NSData+zlib (< 5.0, >= 4.2.1)" 39 + - GTMSessionFetcher/Core (< 4.0, >= 3.3.2) 40 + - MLImage (= 1.0.0-beta6) 41 + - MLKitCommon (~> 12.0) 42 + - nanopb (3.30910.0): 43 + - nanopb/decode (= 3.30910.0) 44 + - nanopb/encode (= 3.30910.0) 45 + - nanopb/decode (3.30910.0) 46 + - nanopb/encode (3.30910.0) 47 + - PromisesObjC (2.4.0) 48 + 49 + DEPENDENCIES: 50 + - GoogleMLKit/FaceDetection (= 7.0.0) 51 + 52 + SPEC REPOS: 53 + trunk: 54 + - GoogleDataTransport 55 + - GoogleMLKit 56 + - GoogleToolboxForMac 57 + - GoogleUtilities 58 + - GTMSessionFetcher 59 + - MLImage 60 + - MLKitCommon 61 + - 
MLKitFaceDetection 62 + - MLKitVision 63 + - nanopb 64 + - PromisesObjC 65 + 66 + SPEC CHECKSUMS: 67 + GoogleDataTransport: aae35b7ea0c09004c3797d53c8c41f66f219d6a7 68 + GoogleMLKit: eff9e23ec1d90ea4157a1ee2e32a4f610c5b3318 69 + GoogleToolboxForMac: d1a2cbf009c453f4d6ded37c105e2f67a32206d8 70 + GoogleUtilities: 26a3abef001b6533cf678d3eb38fd3f614b7872d 71 + GTMSessionFetcher: 5aea5ba6bd522a239e236100971f10cb71b96ab6 72 + MLImage: 0ad1c5f50edd027672d8b26b0fee78a8b4a0fc56 73 + MLKitCommon: 07c2c33ae5640e5380beaaa6e4b9c249a205542d 74 + MLKitFaceDetection: 2a593db4837db503ad3426b565e7aab045cefea5 75 + MLKitVision: 45e79d68845a2de77e2dd4d7f07947f0ed157b0e 76 + nanopb: fad817b59e0457d11a5dfbde799381cd727c1275 77 + PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47 78 + 79 + PODFILE CHECKSUM: dc40bfac79d7daca47a5105ced3cb04958e00832 80 + 81 + COCOAPODS: 1.16.2
+81 -39
mobile/iosApp/iosApp.xcodeproj/project.pbxproj
··· 7 7 objects = { 8 8 9 9 /* Begin PBXBuildFile section */ 10 + 00FBAE687F5BCD32801C67BA /* Pods_iosApp.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 07734A1087237371E675AE3E /* Pods_iosApp.framework */; }; 10 11 058557BB273AAA24004C7B11 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 058557BA273AAA24004C7B11 /* Assets.xcassets */; }; 11 12 058557D9273AAEEB004C7B11 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 058557D8273AAEEB004C7B11 /* Preview Assets.xcassets */; }; 12 13 2152FB042600AC8F00CF470E /* iOSApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2152FB032600AC8F00CF470E /* iOSApp.swift */; }; 13 14 7555FF83242A565900829871 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7555FF82242A565900829871 /* ContentView.swift */; }; 14 - E9B512A52D862B0F00D52AB6 /* MLKitBarcodeScanning in Frameworks */ = {isa = PBXBuildFile; productRef = E9B512A42D862B0F00D52AB6 /* MLKitBarcodeScanning */; }; 15 - E9B512A72D862B0F00D52AB6 /* MLKitFaceDetection in Frameworks */ = {isa = PBXBuildFile; productRef = E9B512A62D862B0F00D52AB6 /* MLKitFaceDetection */; }; 16 15 /* End PBXBuildFile section */ 17 16 18 17 /* Begin PBXFileReference section */ 19 18 058557BA273AAA24004C7B11 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; }; 20 19 058557D8273AAEEB004C7B11 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; }; 20 + 07734A1087237371E675AE3E /* Pods_iosApp.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_iosApp.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 21 21 2152FB032600AC8F00CF470E /* iOSApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = iOSApp.swift; sourceTree = "<group>"; }; 22 22 
7555FF7B242A565900829871 /* myrus-mobile.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "myrus-mobile.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 23 23 7555FF82242A565900829871 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; }; 24 24 7555FF8C242A565B00829871 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; 25 + 95ADA54C8FE56AF9CA35ABD0 /* Pods-iosApp.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-iosApp.debug.xcconfig"; path = "Target Support Files/Pods-iosApp/Pods-iosApp.debug.xcconfig"; sourceTree = "<group>"; }; 25 26 AB3632DC29227652001CCB65 /* Config.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Config.xcconfig; sourceTree = "<group>"; }; 27 + F7B9AA6F955C972BAA204DE2 /* Pods-iosApp.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-iosApp.release.xcconfig"; path = "Target Support Files/Pods-iosApp/Pods-iosApp.release.xcconfig"; sourceTree = "<group>"; }; 26 28 /* End PBXFileReference section */ 27 29 28 30 /* Begin PBXFrameworksBuildPhase section */ ··· 30 32 isa = PBXFrameworksBuildPhase; 31 33 buildActionMask = 2147483647; 32 34 files = ( 33 - E9B512A52D862B0F00D52AB6 /* MLKitBarcodeScanning in Frameworks */, 34 - E9B512A72D862B0F00D52AB6 /* MLKitFaceDetection in Frameworks */, 35 + 00FBAE687F5BCD32801C67BA /* Pods_iosApp.framework in Frameworks */, 35 36 ); 36 37 runOnlyForDeploymentPostprocessing = 0; 37 38 }; ··· 46 47 path = "Preview Content"; 47 48 sourceTree = "<group>"; 48 49 }; 50 + 2A2D659B26830710A9A8E8BE /* Pods */ = { 51 + isa = PBXGroup; 52 + children = ( 53 + 95ADA54C8FE56AF9CA35ABD0 /* Pods-iosApp.debug.xcconfig */, 54 + F7B9AA6F955C972BAA204DE2 
/* Pods-iosApp.release.xcconfig */, 55 + ); 56 + path = Pods; 57 + sourceTree = "<group>"; 58 + }; 49 59 42799AB246E5F90AF97AA0EF /* Frameworks */ = { 50 60 isa = PBXGroup; 51 61 children = ( 62 + 07734A1087237371E675AE3E /* Pods_iosApp.framework */, 52 63 ); 53 64 name = Frameworks; 54 65 sourceTree = "<group>"; ··· 60 71 7555FF7D242A565900829871 /* iosApp */, 61 72 7555FF7C242A565900829871 /* Products */, 62 73 42799AB246E5F90AF97AA0EF /* Frameworks */, 74 + 2A2D659B26830710A9A8E8BE /* Pods */, 63 75 ); 64 76 sourceTree = "<group>"; 65 77 }; ··· 98 110 isa = PBXNativeTarget; 99 111 buildConfigurationList = 7555FFA5242A565B00829871 /* Build configuration list for PBXNativeTarget "iosApp" */; 100 112 buildPhases = ( 113 + 7A2DFE5500B1B3090857DA1E /* [CP] Check Pods Manifest.lock */, 101 114 F36B1CEB2AD83DDC00CB74D5 /* Compile Kotlin Framework */, 102 115 7555FF77242A565900829871 /* Sources */, 103 116 B92378962B6B1156000C7307 /* Frameworks */, 104 117 7555FF79242A565900829871 /* Resources */, 118 + 05A9CE664DBE199AF7FBD83E /* [CP] Embed Pods Frameworks */, 119 + 97BC41D49C174C6809B569F2 /* [CP] Copy Pods Resources */, 105 120 ); 106 121 buildRules = ( 107 122 ); 108 123 dependencies = ( 109 124 ); 110 125 name = iosApp; 111 - packageProductDependencies = ( 112 - E9B512A42D862B0F00D52AB6 /* MLKitBarcodeScanning */, 113 - E9B512A62D862B0F00D52AB6 /* MLKitFaceDetection */, 114 - ); 115 126 productName = iosApp; 116 127 productReference = 7555FF7B242A565900829871 /* myrus-mobile.app */; 117 128 productType = "com.apple.product-type.application"; ··· 141 152 Base, 142 153 ); 143 154 mainGroup = 7555FF72242A565900829871; 144 - packageReferences = ( 145 - E9B512A32D862B0F00D52AB6 /* XCRemoteSwiftPackageReference "google-mlkit-swiftpm" */, 146 - ); 147 155 productRefGroup = 7555FF7C242A565900829871 /* Products */; 148 156 projectDirPath = ""; 149 157 projectRoot = ""; ··· 166 174 /* End PBXResourcesBuildPhase section */ 167 175 168 176 /* Begin PBXShellScriptBuildPhase 
section */ 177 + 05A9CE664DBE199AF7FBD83E /* [CP] Embed Pods Frameworks */ = { 178 + isa = PBXShellScriptBuildPhase; 179 + buildActionMask = 2147483647; 180 + files = ( 181 + ); 182 + inputFileListPaths = ( 183 + "${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-frameworks-${CONFIGURATION}-input-files.xcfilelist", 184 + ); 185 + name = "[CP] Embed Pods Frameworks"; 186 + outputFileListPaths = ( 187 + "${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-frameworks-${CONFIGURATION}-output-files.xcfilelist", 188 + ); 189 + runOnlyForDeploymentPostprocessing = 0; 190 + shellPath = /bin/sh; 191 + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-frameworks.sh\"\n"; 192 + showEnvVarsInLog = 0; 193 + }; 194 + 7A2DFE5500B1B3090857DA1E /* [CP] Check Pods Manifest.lock */ = { 195 + isa = PBXShellScriptBuildPhase; 196 + buildActionMask = 2147483647; 197 + files = ( 198 + ); 199 + inputFileListPaths = ( 200 + ); 201 + inputPaths = ( 202 + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 203 + "${PODS_ROOT}/Manifest.lock", 204 + ); 205 + name = "[CP] Check Pods Manifest.lock"; 206 + outputFileListPaths = ( 207 + ); 208 + outputPaths = ( 209 + "$(DERIVED_FILE_DIR)/Pods-iosApp-checkManifestLockResult.txt", 210 + ); 211 + runOnlyForDeploymentPostprocessing = 0; 212 + shellPath = /bin/sh; 213 + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 214 + showEnvVarsInLog = 0; 215 + }; 216 + 97BC41D49C174C6809B569F2 /* [CP] Copy Pods Resources */ = { 217 + isa = PBXShellScriptBuildPhase; 218 + buildActionMask = 2147483647; 219 + files = ( 220 + ); 221 + inputFileListPaths = ( 222 + "${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-resources-${CONFIGURATION}-input-files.xcfilelist", 223 + ); 224 + name = "[CP] Copy Pods Resources"; 225 + outputFileListPaths = ( 226 + "${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-resources-${CONFIGURATION}-output-files.xcfilelist", 227 + ); 228 + runOnlyForDeploymentPostprocessing = 0; 229 + shellPath = /bin/sh; 230 + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-iosApp/Pods-iosApp-resources.sh\"\n"; 231 + showEnvVarsInLog = 0; 232 + }; 169 233 F36B1CEB2AD83DDC00CB74D5 /* Compile Kotlin Framework */ = { 170 234 isa = PBXShellScriptBuildPhase; 171 235 buildActionMask = 2147483647; ··· 321 385 }; 322 386 7555FFA6242A565B00829871 /* Debug */ = { 323 387 isa = XCBuildConfiguration; 388 + baseConfigurationReference = 95ADA54C8FE56AF9CA35ABD0 /* Pods-iosApp.debug.xcconfig */; 324 389 buildSettings = { 325 390 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 326 391 CODE_SIGN_IDENTITY = "Apple Development"; 327 392 CODE_SIGN_STYLE = Automatic; 328 393 DEVELOPMENT_ASSET_PATHS = "\"iosApp/Preview Content\""; 329 - DEVELOPMENT_TEAM = "${TEAM_ID}"; 394 + DEVELOPMENT_TEAM = BR926NY9FS; 330 395 ENABLE_PREVIEWS = YES; 331 396 FRAMEWORK_SEARCH_PATHS = ( 332 397 "$(inherited)", 333 398 "$(SRCROOT)/../shared/build/xcode-frameworks/$(CONFIGURATION)/$(SDK_NAME)\n$(SRCROOT)/../composeApp/build/xcode-frameworks/$(CONFIGURATION)/$(SDK_NAME)", 334 399 ); 335 400 INFOPLIST_FILE = iosApp/Info.plist; 336 - IPHONEOS_DEPLOYMENT_TARGET = 15.3; 401 + 
IPHONEOS_DEPLOYMENT_TARGET = 18.0; 337 402 LD_RUNPATH_SEARCH_PATHS = ( 338 403 "$(inherited)", 339 404 "@executable_path/Frameworks", ··· 348 413 }; 349 414 7555FFA7242A565B00829871 /* Release */ = { 350 415 isa = XCBuildConfiguration; 416 + baseConfigurationReference = F7B9AA6F955C972BAA204DE2 /* Pods-iosApp.release.xcconfig */; 351 417 buildSettings = { 352 418 ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 353 419 CODE_SIGN_IDENTITY = "Apple Development"; 354 420 CODE_SIGN_STYLE = Automatic; 355 421 DEVELOPMENT_ASSET_PATHS = "\"iosApp/Preview Content\""; 356 - DEVELOPMENT_TEAM = "${TEAM_ID}"; 422 + DEVELOPMENT_TEAM = BR926NY9FS; 357 423 ENABLE_PREVIEWS = YES; 358 424 FRAMEWORK_SEARCH_PATHS = ( 359 425 "$(inherited)", 360 426 "$(SRCROOT)/../shared/build/xcode-frameworks/$(CONFIGURATION)/$(SDK_NAME)\n$(SRCROOT)/../composeApp/build/xcode-frameworks/$(CONFIGURATION)/$(SDK_NAME)", 361 427 ); 362 428 INFOPLIST_FILE = iosApp/Info.plist; 363 - IPHONEOS_DEPLOYMENT_TARGET = 15.3; 429 + IPHONEOS_DEPLOYMENT_TARGET = 18.0; 364 430 LD_RUNPATH_SEARCH_PATHS = ( 365 431 "$(inherited)", 366 432 "@executable_path/Frameworks", ··· 395 461 defaultConfigurationName = Release; 396 462 }; 397 463 /* End XCConfigurationList section */ 398 - 399 - /* Begin XCRemoteSwiftPackageReference section */ 400 - E9B512A32D862B0F00D52AB6 /* XCRemoteSwiftPackageReference "google-mlkit-swiftpm" */ = { 401 - isa = XCRemoteSwiftPackageReference; 402 - repositoryURL = "https://github.com/d-date/google-mlkit-swiftpm"; 403 - requirement = { 404 - kind = upToNextMajorVersion; 405 - minimumVersion = 6.0.0; 406 - }; 407 - }; 408 - /* End XCRemoteSwiftPackageReference section */ 409 - 410 - /* Begin XCSwiftPackageProductDependency section */ 411 - E9B512A42D862B0F00D52AB6 /* MLKitBarcodeScanning */ = { 412 - isa = XCSwiftPackageProductDependency; 413 - package = E9B512A32D862B0F00D52AB6 /* XCRemoteSwiftPackageReference "google-mlkit-swiftpm" */; 414 - productName = MLKitBarcodeScanning; 415 - }; 416 - 
E9B512A62D862B0F00D52AB6 /* MLKitFaceDetection */ = { 417 - isa = XCSwiftPackageProductDependency; 418 - package = E9B512A32D862B0F00D52AB6 /* XCRemoteSwiftPackageReference "google-mlkit-swiftpm" */; 419 - productName = MLKitFaceDetection; 420 - }; 421 - /* End XCSwiftPackageProductDependency section */ 422 464 }; 423 465 rootObject = 7555FF73242A565900829871 /* Project object */; 424 466 }
-60
mobile/iosApp/iosApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
··· 1 - { 2 - "originHash" : "891cce1a2e5740e9da5b83f5ae8318d7657deb75e81f6a5d7be479fbdadf82e9", 3 - "pins" : [ 4 - { 5 - "identity" : "google-mlkit-swiftpm", 6 - "kind" : "remoteSourceControl", 7 - "location" : "https://github.com/d-date/google-mlkit-swiftpm", 8 - "state" : { 9 - "revision" : "064418cee64470417fbc5c726e22815dc92c56c9", 10 - "version" : "6.0.0" 11 - } 12 - }, 13 - { 14 - "identity" : "googledatatransport", 15 - "kind" : "remoteSourceControl", 16 - "location" : "https://github.com/google/GoogleDataTransport.git", 17 - "state" : { 18 - "revision" : "a637d318ae7ae246b02d7305121275bc75ed5565", 19 - "version" : "9.4.0" 20 - } 21 - }, 22 - { 23 - "identity" : "googleutilities", 24 - "kind" : "remoteSourceControl", 25 - "location" : "https://github.com/google/GoogleUtilities.git", 26 - "state" : { 27 - "revision" : "8e5d57ed87057cd7b0e4e8f474d9e78f73eb85f7", 28 - "version" : "7.13.2" 29 - } 30 - }, 31 - { 32 - "identity" : "gtm-session-fetcher", 33 - "kind" : "remoteSourceControl", 34 - "location" : "https://github.com/google/gtm-session-fetcher.git", 35 - "state" : { 36 - "revision" : "0382ca27f22fb3494cf657d8dc356dc282cd1193", 37 - "version" : "3.4.1" 38 - } 39 - }, 40 - { 41 - "identity" : "nanopb", 42 - "kind" : "remoteSourceControl", 43 - "location" : "https://github.com/firebase/nanopb.git", 44 - "state" : { 45 - "revision" : "b7e1104502eca3a213b46303391ca4d3bc8ddec1", 46 - "version" : "2.30910.0" 47 - } 48 - }, 49 - { 50 - "identity" : "promises", 51 - "kind" : "remoteSourceControl", 52 - "location" : "https://github.com/google/promises.git", 53 - "state" : { 54 - "revision" : "540318ecedd63d883069ae7f1ed811a2df00b6ac", 55 - "version" : "2.4.0" 56 - } 57 - } 58 - ], 59 - "version" : 3 60 - }
+10
mobile/iosApp/iosApp.xcworkspace/contents.xcworkspacedata
··· 1 + <?xml version="1.0" encoding="UTF-8"?> 2 + <Workspace 3 + version = "1.0"> 4 + <FileRef 5 + location = "group:iosApp.xcodeproj"> 6 + </FileRef> 7 + <FileRef 8 + location = "group:Pods/Pods.xcodeproj"> 9 + </FileRef> 10 + </Workspace>
+88 -1
mobile/iosApp/iosApp/ContentView.swift
··· 1 1 import UIKit 2 2 import SwiftUI 3 3 import ComposeApp 4 + import MLKitFaceDetection 5 + import MLKitCommon 6 + import MLKitVision 7 + import AVKit 8 + import Vision 9 + 4 10 5 11 struct ComposeView: UIViewControllerRepresentable { 6 12 func makeUIViewController(context: Context) -> UIViewController { 7 - MainViewControllerKt.MainViewController() 13 + NativeAnalyzer.shared.analyzeImageNative = { img, callback in 14 + Analyzer().analyzeImageNative(img: NativeAnalyzer.shared.byteArrayToData(byteArray: img)) { rect, size in 15 + _ = callback(rect, size) 16 + } 17 + } 18 + return MainViewControllerKt.MainViewController() 8 19 } 9 20 10 21 func updateUIViewController(_ uiViewController: UIViewController, context: Context) {} 22 + } 23 + 24 + class Analyzer { 25 + @available(iOS 18.0, *) 26 + func analyzeImageNative(img: Data, callback: @escaping (ComposeApp.Rect, ComposeApp.Size) -> ()) { 27 + // let options = FaceDetectorOptions() 28 + // options.performanceMode = .fast 29 + // options.landmarkMode = .none 30 + // options.classificationMode = .none 31 + // 32 + // do { 33 + // if let image = UIImage(data: img) { 34 + // let vizImg = VisionImage(image: image) 35 + //// UIImageWriteToSavedPhotosAlbum(image, self, nil, nil) 36 + // vizImg.orientation = Analyzer.imageOrientation(deviceOrientation: .portrait, cameraPosition: .front) 37 + // 38 + // let faceDetector = FaceDetector.faceDetector(options: options) 39 + // faceDetector.process(vizImg) { faces, err in 40 + // 41 + // if err == nil { 42 + // if let faces { 43 + // print("faces not nil: \(faces.count)") 44 + // for face in faces { 45 + // callback(Rect(top: Float(face.frame.minY), left: Float(face.frame.minX), bottom: Float(face.frame.maxY), right: Float(face.frame.maxX)), Size(width: Float(image.size.width), height: Float(image.size.height))) 46 + // 47 + // } 48 + // } else { 49 + // print("faces nil") 50 + // } 51 + // } else { 52 + // print("skipping frame - error processing image: \(err)") 53 + // } 54 
+ // } 55 + // 56 + // 57 + // } 58 + // } catch { 59 + // print("skipping frame - error processing image: \(error)") 60 + // } 61 + if let image = UIImage(data: img) { 62 + UIImageWriteToSavedPhotosAlbum(image, self, nil, nil) 63 + let faceDetectionRequest = DetectFaceRectanglesRequest() 64 + Task { 65 + do { 66 + let res = try await faceDetectionRequest.perform(on: CIImage(image: image)!) 67 + res.forEach { face in 68 + 69 + var newSize = face.boundingBox.toImageCoordinates(image.size, origin: .upperLeft) 70 + print("FACE RESULT @ \(newSize)") 71 + callback(Rect(top: Float(newSize.minY), left: Float(newSize.minX), bottom: 0, right: 0), Size(width: Float(image.size.width), height: Float(image.size.height))) 72 + } 73 + } catch { 74 + print("error processing image: \(error)") 75 + } 76 + } 77 + } 78 + 79 + 80 + } 81 + static func imageOrientation( 82 + deviceOrientation: UIDeviceOrientation, 83 + cameraPosition: AVCaptureDevice.Position 84 + ) -> UIImage.Orientation { 85 + switch deviceOrientation { 86 + case .portrait: 87 + return cameraPosition == .front ? .leftMirrored : .right 88 + case .landscapeLeft: 89 + return cameraPosition == .front ? .downMirrored : .up 90 + case .portraitUpsideDown: 91 + return cameraPosition == .front ? .rightMirrored : .left 92 + case .landscapeRight: 93 + return cameraPosition == .front ? .upMirrored : .down 94 + case .faceDown, .faceUp, .unknown: 95 + return .up 96 + } 97 + } 11 98 } 12 99 13 100 struct ContentView: View {