
Migrate from the legacy custom model API

Version 0.20.0 of the Firebase/MLModelInterpreter library introduces a new getLatestModelFilePath() method, which gets the location on the device of custom models. You can use this method to directly instantiate a TensorFlow Lite Interpreter object, which you can use instead of Firebase's ModelInterpreter wrapper.

Going forward, this is the preferred approach. Because the TensorFlow Lite interpreter version is no longer coupled with the Firebase library version, you have more flexibility to upgrade to new versions of TensorFlow Lite when you want, or to more easily use custom TensorFlow Lite builds.

This page shows how you can migrate from using ModelInterpreter to the TensorFlow Lite Interpreter.

1. Update project dependencies

Update your project's Podfile to include version 0.20.0 (or later) of the Firebase/MLModelInterpreter library and the TensorFlow Lite library:

Before:

Swift

pod 'Firebase/MLModelInterpreter', '0.19.0'

Objective-C

pod 'Firebase/MLModelInterpreter', '0.19.0'

After:

Swift

pod 'Firebase/MLModelInterpreter', '~> 0.20.0'
pod 'TensorFlowLiteSwift'

Objective-C

pod 'Firebase/MLModelInterpreter', '~> 0.20.0'
pod 'TensorFlowLiteObjC'
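
After updating the Podfile, run pod install and rebuild your workspace. The snippet below is a minimal sketch of the imports a Swift source file typically needs afterwards; the module names are assumptions based on the default CocoaPods setup, in which the TensorFlowLiteSwift pod exposes the TensorFlowLite module and the Firebase pod is imported as before.

Swift

// Imports after the dependency change (module names assume the default
// CocoaPods module naming; adjust if your project imports Firebase differently).
import Firebase
import TensorFlowLite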

2. Create a TensorFlow Lite interpreter instead of a Firebase ModelInterpreter

Instead of creating a Firebase ModelInterpreter, get the location of the model on the device with getLatestModelFilePath() and use it to create a TensorFlow Lite Interpreter:

Before:

Swift

let remoteModel = CustomRemoteModel(
    name: "your_remote_model"  // The name you assigned in the Firebase console.
)
interpreter = ModelInterpreter.modelInterpreter(remoteModel: remoteModel)

Objective-C

// Initialize using the name you assigned in the Firebase console.
FIRCustomRemoteModel *remoteModel =
        [[FIRCustomRemoteModel alloc] initWithName:@"your_remote_model"];
interpreter = [FIRModelInterpreter modelInterpreterForRemoteModel:remoteModel];

After:

Swift

let remoteModel = CustomRemoteModel(
    name: "your_remote_model"  // The name you assigned in the Firebase console.
)
ModelManager.modelManager().getLatestModelFilePath(remoteModel) { (remoteModelPath, error) in
    guard error == nil, let remoteModelPath = remoteModelPath else { return }
    do {
        interpreter = try Interpreter(modelPath: remoteModelPath)
    } catch {
        // Handle the error.
    }
}

Objective-C

FIRCustomRemoteModel *remoteModel =
        [[FIRCustomRemoteModel alloc] initWithName:@"your_remote_model"];
[[FIRModelManager modelManager] getLatestModelFilePath:remoteModel
                                            completion:^(NSString * _Nullable filePath,
                                                         NSError * _Nullable error) {
    if (error != nil || filePath == nil) { return; }

    NSError *tfError = nil;
    interpreter = [[TFLInterpreter alloc] initWithModelPath:filePath error:&tfError];
}];
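
getLatestModelFilePath() can only return a path for a model that has already been downloaded to the device. If your app does not download the model elsewhere, you can trigger the download first. The sketch below is an assumption based on the ModelManager download API in the same Firebase/MLModelInterpreter release; check the headers of the version you use.

Swift

// Hypothetical sketch: make sure the remote model is on the device before
// asking for its file path. download(_:conditions:) and isModelDownloaded(_:)
// are assumed from the same library version; verify against your release.
let remoteModel = CustomRemoteModel(name: "your_remote_model")
let conditions = ModelDownloadConditions(
    allowsCellularAccess: true,
    allowsBackgroundDownloading: true
)

if !ModelManager.modelManager().isModelDownloaded(remoteModel) {
    // Starts (or resumes) the download; observe the returned Progress object
    // if you need to report progress to the user.
    _ = ModelManager.modelManager().download(remoteModel, conditions: conditions)
}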

3. Update input and output preparation code

With ModelInterpreter, you specify the model's input and output shapes by passing a ModelInputOutputOptions object to the interpreter when you run it.

For the TensorFlow Lite interpreter, you instead call allocateTensors() to allocate space for the model's input and output tensors, then copy your input data to the input tensors.

For example, if your model has an input shape of [1 224 224 3] float values and an output shape of [1 1000] float values, make these changes:

Before:

Swift

let ioOptions = ModelInputOutputOptions()
do {
    try ioOptions.setInputFormat(
        index: 0,
        type: .float32,
        dimensions: [1, 224, 224, 3]
    )
    try ioOptions.setOutputFormat(
        index: 0,
        type: .float32,
        dimensions: [1, 1000]
    )
} catch let error as NSError {
    print("Failed to set input or output format with error: \(error.localizedDescription)")
}

let inputs = ModelInputs()
do {
    var inputData = Data()
    // Then populate with input data.

    try inputs.addInput(inputData)
} catch let error {
    print("Failed to add input: \(error)")
}

interpreter.run(inputs: inputs, options: ioOptions) { outputs, error in
    guard error == nil, let outputs = outputs else { return }
    // Process outputs
    // ...
}

Objective-C

FIRModelInputOutputOptions *ioOptions = [[FIRModelInputOutputOptions alloc] init];
NSError *error;
[ioOptions setInputFormatForIndex:0
                             type:FIRModelElementTypeFloat32
                       dimensions:@[@1, @224, @224, @3]
                            error:&error];
if (error != nil) { return; }
[ioOptions setOutputFormatForIndex:0
                              type:FIRModelElementTypeFloat32
                        dimensions:@[@1, @1000]
                             error:&error];
if (error != nil) { return; }

FIRModelInputs *inputs = [[FIRModelInputs alloc] init];
NSMutableData *inputData = [[NSMutableData alloc] initWithCapacity:0];
// Then populate with input data.

[inputs addInput:inputData error:&error];
if (error != nil) { return; }

[interpreter runWithInputs:inputs
                   options:ioOptions
                completion:^(FIRModelOutputs * _Nullable outputs,
                             NSError * _Nullable error) {
  if (error != nil || outputs == nil) {
    return;
  }
  // Process outputs
  // ...
}];

After:

Swift

do {
    try interpreter.allocateTensors()

    var inputData = Data()
    // Then populate with input data.

    try interpreter.copy(inputData, toInputAt: 0)

    try interpreter.invoke()
} catch let err {
    print(err.localizedDescription)
}

Objective-C

NSError *error = nil;

[interpreter allocateTensorsWithError:&error];
if (error != nil) { return; }

TFLTensor *input = [interpreter inputTensorAtIndex:0 error:&error];
if (error != nil) { return; }

NSMutableData *inputData = [[NSMutableData alloc] initWithCapacity:0];
// Then populate with input data.

[input copyData:inputData error:&error];
if (error != nil) { return; }

[interpreter invokeWithError:&error];
if (error != nil) { return; }
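
Both versions of the example leave the input buffer itself open ("Then populate with input data"). As an illustration only, the helper below is a hypothetical function (not part of the Firebase or TensorFlow Lite APIs) that builds a [1, 224, 224, 3] Float32 buffer from raw RGB bytes, normalizing each channel to [0, 1]; how you obtain the bytes and which normalization your model expects depend on your model.

Swift

import Foundation

// Hypothetical helper: converts per-pixel RGB bytes into the Float32 Data
// expected by a [1, 224, 224, 3] input tensor, scaling each channel to [0, 1].
func makeInputData(rgbBytes: [UInt8], width: Int = 224, height: Int = 224) -> Data {
    var floats = [Float32]()
    floats.reserveCapacity(width * height * 3)
    for byte in rgbBytes {
        floats.append(Float32(byte) / 255.0)
    }
    return floats.withUnsafeBufferPointer { Data(buffer: $0) }
}

// Usage with the TensorFlow Lite interpreter shown above:
// try interpreter.copy(makeInputData(rgbBytes: pixels), toInputAt: 0)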

4. Update output handling code

Finally, instead of getting the model's output with the ModelOutputs object's output() method, get the output tensor from the interpreter and convert its data to whatever structure is convenient for your use case.

For example, if you're doing classification, you might make changes like the following:

Before:

Swift

let output = try? outputs.output(index: 0) as? [[NSNumber]]
let probabilities = output?[0]

guard let labelPath = Bundle.main.path(
    forResource: "custom_labels",
    ofType: "txt"
) else { return }
let fileContents = try? String(contentsOfFile: labelPath)
guard let labels = fileContents?.components(separatedBy: "\n") else { return }

for i in 0 ..< labels.count {
    if let probability = probabilities?[i] {
        print("\(labels[i]): \(probability)")
    }
}

Objective-C

// Get first and only output of inference with a batch size of 1
NSError *error;
NSArray *probabilities = [outputs outputAtIndex:0 error:&error][0];
if (error != nil) { return; }

NSString *labelPath = [NSBundle.mainBundle pathForResource:@"retrained_labels"
                                                    ofType:@"txt"];
NSString *fileContents = [NSString stringWithContentsOfFile:labelPath
                                                   encoding:NSUTF8StringEncoding
                                                      error:&error];
if (error != nil || fileContents == NULL) { return; }
NSArray<NSString *> *labels = [fileContents componentsSeparatedByString:@"\n"];
for (int i = 0; i < labels.count; i++) {
    NSString *label = labels[i];
    NSNumber *probability = probabilities[i];
    NSLog(@"%@: %f", label, probability.floatValue);
}

After:

Swift

do {
    // After calling interpreter.invoke():
    let output = try interpreter.output(at: 0)
    let probabilities =
            UnsafeMutableBufferPointer<Float32>.allocate(capacity: 1000)
    output.data.copyBytes(to: probabilities)

    guard let labelPath = Bundle.main.path(
        forResource: "custom_labels",
        ofType: "txt"
    ) else { return }
    let fileContents = try? String(contentsOfFile: labelPath)
    guard let labels = fileContents?.components(separatedBy: "\n") else { return }

    for i in labels.indices {
        print("\(labels[i]): \(probabilities[i])")
    }
} catch let err {
    print(err.localizedDescription)
}

Objective-C

NSError *error = nil;

TFLTensor *output = [interpreter outputTensorAtIndex:0 error:&error];
if (error != nil) { return; }

NSData *outputData = [output dataWithError:&error];
if (error != nil) { return; }

Float32 probabilities[outputData.length / 4];
[outputData getBytes:&probabilities length:outputData.length];

NSString *labelPath = [NSBundle.mainBundle pathForResource:@"custom_labels"
                                                    ofType:@"txt"];
NSString *fileContents = [NSString stringWithContentsOfFile:labelPath
                                                   encoding:NSUTF8StringEncoding
                                                      error:&error];
if (error != nil || fileContents == nil) { return; }

NSArray<NSString *> *labels = [fileContents componentsSeparatedByString:@"\n"];
for (int i = 0; i < labels.count; i++) {
    NSLog(@"%@: %f", labels[i], probabilities[i]);
}
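
Once the labels and probabilities are index-aligned, ranking the predictions works the same with either API. The sketch below is a hypothetical helper (not from this guide) showing one way to pick the top results in Swift; it assumes both arrays have matching lengths.

Swift

// Hypothetical helper: pairs labels with probabilities and returns the
// highest-scoring entries.
func topPredictions(labels: [String],
                    probabilities: [Float32],
                    count: Int = 5) -> [(label: String, probability: Float32)] {
    let pairs = zip(labels, probabilities).map { (label: $0.0, probability: $0.1) }
    return Array(pairs.sorted { $0.probability > $1.probability }.prefix(count))
}

// Example, using the labels array and output buffer from the Swift code above:
// let best = topPredictions(labels: labels, probabilities: Array(probabilities))
// best.forEach { print("\($0.label): \($0.probability)") }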