
Commit 9b34ed0

Atharva Vaidya committed
chore: formatter run
1 parent 89e1b2f commit 9b34ed0

9 files changed, +46 −49 lines changed

Package.swift

Lines changed: 5 additions & 3 deletions
@@ -6,20 +6,22 @@ import PackageDescription
 let package = Package(
     name: "CompilerSwiftAI",
     platforms: [
-        .macOS(.v15), .iOS(.v18)
+        .macOS(.v15), .iOS(.v18),
     ],
     products: [
         // Products define the executables and libraries a package produces, making them visible to other packages.
         .library(
             name: "CompilerSwiftAI",
-            targets: ["CompilerSwiftAI"]),
+            targets: ["CompilerSwiftAI"]
+        ),
     ],
     dependencies: [],
     targets: [
         // Targets are the basic building blocks of a package, defining a module or a test suite.
         // Targets can depend on other targets in this package and products from dependencies.
         .target(
-            name: "CompilerSwiftAI", dependencies: []),
+            name: "CompilerSwiftAI", dependencies: []
+        ),
         .testTarget(
             name: "CompilerSwiftAITests",
             dependencies: ["CompilerSwiftAI"]

Sources/CompilerSwiftAI/ChatView.swift

Lines changed: 6 additions & 7 deletions
@@ -3,28 +3,27 @@
 import SwiftUI

 public struct ChatView<AppState: Encodable & Sendable, Parameters: Decodable & Sendable>: View {
-
     @State var model = ChatViewModel()
     @State private var speechService = SpeechRecognitionService()
-
+
     var state: AppState
     var service: Service
     var describe: (Function<Parameters>) -> String
-    var execute: (Function<Parameters>) -> ()
-
-    public init(state: AppState, service: Service, describe: @escaping (Function<Parameters>) -> String, execute: @escaping (Function<Parameters>) -> ()) {
+    var execute: (Function<Parameters>) -> Void
+
+    public init(state: AppState, service: Service, describe: @escaping (Function<Parameters>) -> String, execute: @escaping (Function<Parameters>) -> Void) {
         self.state = state
         self.service = service
         self.describe = describe
         self.execute = execute
     }
-
+
     func process(prompt: String) {
         Task {
             model.addStep("Sending request to Compiler")
             guard let functions: [Function<Parameters>] = try? await service.processFunction(prompt, for: state) else { return }
             model.completeLastStep()
-
+
             for function in functions {
                 model.addStep(describe(function))
                 execute(function)
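
Given the signatures above, a minimal caller sketch looks like the following. PlayerState, PlayerParameters, and ContentView are hypothetical app-side types; only ChatView, Service, and Function come from the package, and how the Service instance is obtained is not shown in this diff.

import SwiftUI
import CompilerSwiftAI

// Hypothetical app-specific types for illustration
struct PlayerState: Encodable, Sendable { var tempo: Double }
struct PlayerParameters: Decodable, Sendable { let tempo: Double? }

struct ContentView: View {
    let service: Service  // obtained elsewhere; Service's initializer isn't part of this diff

    var body: some View {
        ChatView(
            state: PlayerState(tempo: 120),
            service: service,
            describe: { (function: Function<PlayerParameters>) in
                "Calling \(function.name)"
            },
            execute: { (function: Function<PlayerParameters>) in
                // Hypothetical: apply the decoded parameters to app state
                _ = function.parameters?.tempo
            }
        )
    }
}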

Sources/CompilerSwiftAI/ChatViewModel.swift

Lines changed: 7 additions & 7 deletions
@@ -8,29 +8,29 @@ class ChatViewModel {
     var inputText = ""
     var isRecording = false
     var processingSteps: [ProcessingStep] = []
-
+
     var speechService: SpeechRecognitionService?
-
+
     func setupSpeechHandlers() {
         speechService?.onTranscript = { [weak self] transcript in
             Task { @MainActor in
                 self?.inputText = transcript
             }
         }
-
+
         speechService?.onError = { [weak self] error in
             Task { @MainActor in
                 print("Speech recognition error: \(error.localizedDescription)")
                 self?.isRecording = false
             }
         }
-
+
         // Directly observe isRecording
         if let service = speechService {
             isRecording = service.isRecording
         }
     }
-
+
     func toggleRecording() {
         if isRecording {
             speechService?.stopRecording()
@@ -42,11 +42,11 @@ class ChatViewModel {
             isRecording = service.isRecording
         }
     }
-
+
     func addStep(_ description: String) {
         processingSteps.append(ProcessingStep(text: description, isComplete: false))
     }
-
+
     func completeLastStep() {
         guard let index = processingSteps.indices.last else { return }
         processingSteps[index].isComplete = true
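
The step-tracking API pairs naturally: push a step before starting work, then mark it complete when the work finishes. A small illustrative sequence (usable from inside the module, since ChatViewModel is internal; the step text is taken from ChatView above):

let model = ChatViewModel()
model.addStep("Sending request to Compiler")  // appends an incomplete ProcessingStep
// ... await the network call ...
model.completeLastStep()                      // flips isComplete on the most recent step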

Sources/CompilerSwiftAI/ProcessingStep.swift

Lines changed: 1 addition & 2 deletions
@@ -3,11 +3,10 @@
 import Foundation

 public struct ProcessingStep: Identifiable, Hashable {
-
     public let id = UUID()
     public let text: String
     public var isComplete: Bool
-
+
     public init(text: String, isComplete: Bool) {
         self.text = text
         self.isComplete = isComplete

Sources/CompilerSwiftAI/ProcessingStepsView.swift

Lines changed: 1 addition & 2 deletions
@@ -3,9 +3,8 @@
 import SwiftUI

 struct ProcessingStepsView: View {
-
     var steps: [ProcessingStep]
-
+
     var body: some View {
         // Processing Steps Area
         VStack(alignment: .leading, spacing: 4) {

Sources/CompilerSwiftAI/Service.swift

Lines changed: 1 addition & 2 deletions
@@ -6,7 +6,7 @@ import Foundation
 public struct Function<Parameters>: Decodable, Sendable where Parameters: Decodable & Sendable {
     public let name: String
     public let parameters: Parameters?
-
+
     private enum CodingKeys: String, CodingKey {
         case name = "function"
         case parameters
@@ -80,5 +80,4 @@ public final actor Service {
             throw error
         }
     }
-
 }
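
The CodingKeys above mean the wire format carries the function name under a "function" key. A hedged decoding sketch follows; the JSON payload and the PlayParameters type are hypothetical, but the key mapping comes straight from the CodingKeys in this diff.

import Foundation

// Hypothetical parameter payload for illustration
struct PlayParameters: Decodable, Sendable {
    let tempo: Double?
}

let json = Data(#"{"function": "setTempo", "parameters": {"tempo": 128}}"#.utf8)
do {
    let decoded = try JSONDecoder().decode(Function<PlayParameters>.self, from: json)
    print(decoded.name)                     // "setTempo"
    print(decoded.parameters?.tempo ?? .nan) // 128.0
} catch {
    print("Decoding failed: \(error)")
}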
Sources/CompilerSwiftAI/SpeechRecognitionService.swift

Lines changed: 19 additions & 19 deletions
@@ -1,22 +1,22 @@
-import Speech
 import Foundation
 import Observation
+import Speech

 @Observable
 class SpeechRecognitionService {
     private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))
     private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
     private var recognitionTask: SFSpeechRecognitionTask?
     private let audioEngine = AVAudioEngine()
-
+
     var isRecording = false
     var onTranscript: ((String) -> Void)?
     var onError: ((Error) -> Void)?
-
+
     init() {
         requestAuthorization()
     }
-
+
     private func requestAuthorization() {
         SFSpeechRecognizer.requestAuthorization { status in
             DispatchQueue.main.async {
@@ -32,48 +32,48 @@ class SpeechRecognitionService {
             }
         }
     }
-
+
     func startRecording() throws {
         // Cancel any ongoing task
         recognitionTask?.cancel()
         recognitionTask = nil
-
+
         #if !os(macOS)
-        // Configure audio session
-        let audioSession = AVAudioSession.sharedInstance()
-        try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
-        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
+            // Configure audio session
+            let audioSession = AVAudioSession.sharedInstance()
+            try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
+            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
         #endif
-
+
         recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
-
+
         guard let recognitionRequest = recognitionRequest else { return }
-
+
         let inputNode = audioEngine.inputNode
         recognitionRequest.shouldReportPartialResults = true
-
+
         recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest) { [weak self] result, error in
             if let error = error {
                 self?.onError?(error)
                 self?.stopRecording()
                 return
             }
-
+
             if let result = result {
                 self?.onTranscript?(result.bestTranscription.formattedString)
             }
         }
-
+
         let recordingFormat = inputNode.outputFormat(forBus: 0)
         inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
             recognitionRequest.append(buffer)
         }
-
+
         audioEngine.prepare()
         try audioEngine.start()
         isRecording = true
     }
-
+
     func stopRecording() {
         audioEngine.stop()
         audioEngine.inputNode.removeTap(onBus: 0)
@@ -83,4 +83,4 @@ class SpeechRecognitionService {
         recognitionTask = nil
         isRecording = false
     }
-}
+}
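
A minimal wiring sketch for this service, usable from inside the module since the class is internal (the handler bodies here are hypothetical, and microphone/speech permissions are assumed to be granted):

let speech = SpeechRecognitionService()
speech.onTranscript = { transcript in
    print("Heard: \(transcript)")  // fires repeatedly with partial results
}
speech.onError = { error in
    print("Speech error: \(error.localizedDescription)")
}

do {
    try speech.startRecording()  // taps the mic and streams transcripts
} catch {
    print("Could not start recording: \(error)")
}
// ... later ...
speech.stopRecording()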

Sources/CompilerSwiftAI/TextInputView.swift

Lines changed: 5 additions & 6 deletions
@@ -3,10 +3,9 @@
 import SwiftUI

 struct TextInputView: View {
-
     @Bindable var model: ChatViewModel
-    var process: (String) -> ()
-
+    var process: (String) -> Void
+
     var body: some View {
         VStack(spacing: 8) {
             Text("Prompt")
@@ -16,8 +15,8 @@ struct TextInputView: View {

         ZStack(alignment: .topLeading) {
             TextEditor(text: $model.inputText)
-            .padding(.horizontal, 4)
-            .padding(.vertical, 8)
+                .padding(.horizontal, 4)
+                .padding(.vertical, 8)
         }
         .frame(height: 100)
         .foregroundStyle(DLMColors.primary100)
@@ -37,7 +36,7 @@ struct TextInputView: View {
             .foregroundColor(.white)
             .cornerRadius(8)
         }
-
+
         Button(action: {
             model.speechService?.stopRecording()
             process(model.inputText)

Tests/CompilerSwiftAITests/CompilerSwiftAITests.swift

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
-import Testing
 @testable import CompilerSwiftAI
+import Testing

 @Test func example() async throws {
     // Write your test here and use APIs like `#expect(...)` to check expected conditions.
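
To go beyond the placeholder, a Swift Testing example against the package's own ProcessingStep type might look like this (the test name is hypothetical; the init and isComplete property are shown in the ProcessingStep diff above):

@Test func processingStepCompletion() async throws {
    var step = ProcessingStep(text: "Sending request to Compiler", isComplete: false)
    #expect(!step.isComplete)
    step.isComplete = true   // isComplete is a public var, so completion can be toggled
    #expect(step.isComplete)
}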
