feat: add README and an example project, fix the plugin path, add basic_intl as a local dependency

2025-09-10 15:35:07 +08:00
parent e39f42a3df
commit 4176116bb3
56 changed files with 2841 additions and 395 deletions

android/.gitignore

@@ -12,3 +12,5 @@ GeneratedPluginRegistrant.java
key.properties
**/*.keystore
**/*.jks
/build/
/src/build/


@@ -1,54 +0,0 @@
plugins {
id("com.android.application")
id("kotlin-android")
// The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins.
id("dev.flutter.flutter-gradle-plugin")
}
android {
namespace = "com.example.ai_chat_assistant"
compileSdk = flutter.compileSdkVersion
ndkVersion = "29.0.13599879"
compileOptions {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
}
kotlinOptions {
jvmTarget = JavaVersion.VERSION_11.toString()
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId = "com.example.ai_chat_assistant"
// You can update the following values to match your application needs.
// For more information, see: https://flutter.dev/to/review-gradle-config.
minSdk = 24
targetSdk = flutter.targetSdkVersion
versionCode = flutter.versionCode
versionName = flutter.versionName
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig = signingConfigs.getByName("debug")
}
}
repositories {
flatDir {
dirs("libs")
}
}
}
flutter {
source = "../.."
}
dependencies {
implementation(files("libs/fastjson-1.1.46.android.jar"))
implementation(files("libs/nuisdk-release.aar"))
}


@@ -1,8 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.intent.action.TTS_SERVICE" />
</manifest>


@@ -1,48 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:label="ai_chat_assistant"
android:name="${applicationName}"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:taskAffinity=""
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data
android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme"
/>
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
<!-- Required to query activities that can process text, see:
https://developer.android.com/training/package-visibility and
https://developer.android.com/reference/android/content/Intent#ACTION_PROCESS_TEXT.
In particular, this is used by the Flutter engine in io.flutter.plugin.text.ProcessTextPlugin. -->
<queries>
<intent>
<action android:name="android.intent.action.PROCESS_TEXT"/>
<data android:mimeType="text/plain"/>
</intent>
<intent>
<action android:name="android.intent.action.TTS_SERVICE"/>
</intent>
</queries>
</manifest>


@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="?android:colorBackground" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>


@@ -1,12 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Five binary image files deleted (contents not shown); previous sizes: 544 B, 442 B, 721 B, 1.0 KiB, 1.4 KiB.


@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>


@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
<style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

android/build.gradle (new file)

@@ -0,0 +1,94 @@
group 'com.example.ai_chat_assistant'
version '1.0-SNAPSHOT'
buildscript {
ext.kotlin_version = '2.1.20'
repositories {
maven { url rootProject.file("../ai_chat_assistant/android/mavenLocal") }
google()
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:8.11.1'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects {
repositories {
maven { url rootProject.file("../ai_chat_assistant/android/mavenLocal") }
google()
mavenCentral()
}
}
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-kapt'
android {
namespace 'com.example.ai_chat_assistant'
compileSdk 33
compileOptions {
sourceCompatibility JavaVersion.VERSION_17
targetCompatibility JavaVersion.VERSION_17
}
kotlinOptions {
jvmTarget = "17"
}
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
test.java.srcDirs += 'src/test/kotlin'
}
defaultConfig {
minSdkVersion 23
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
}
}
buildFeatures {
dataBinding true
viewBinding = true
}
// Load the local.properties configuration file
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader('UTF-8') {
localProperties.load(it)
}
}
// Resolve the Flutter SDK path
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}
repositories {
// flatDir {
// dirs("libs")
// }
}
dependencies {
// implementation 'com.alibaba.idst:nui-release:1.0.0'
// implementation 'com.alibaba:fastjson:1.2.83'
// compileOnly files("$flutterRoot/bin/cache/artifacts/engine/android-arm/flutter.jar")
implementation(files("libs/fastjson-1.1.46.android.jar"))
implementation(files("libs/nui-release-1.0.0.aar"))
}
}


@@ -2,4 +2,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://mirrors.aliyun.com/macports/distfiles/gradle/gradle-8.12-all.zip
distributionUrl=https\://mirrors.aliyun.com/macports/distfiles/gradle/gradle-8.13-all.zip


@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>com.alibaba.idst</groupId>
<artifactId>nui-release</artifactId>
<version>1.0.0</version>
<packaging>aar</packaging>
</project>
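Together with nui-release-1.0.0.aar placed alongside it, this pom turns the android/mavenLocal directory referenced in build.gradle above into a resolvable Maven repository, so the commented-out coordinate com.alibaba.idst:nui-release:1.0.0 could be consumed from it. A minimal Java sketch of the standard Maven repository layout such coordinates map to (the helper class and repo root are illustrative assumptions, not part of this commit):

// Illustration only: maps Maven coordinates to the expected path in a local repo.
import java.nio.file.Path;
import java.nio.file.Paths;

public class MavenLayoutDemo {
    static Path artifactPath(Path repoRoot, String groupId, String artifactId,
                             String version, String packaging) {
        Path p = repoRoot;
        for (String part : groupId.split("\\.")) {
            p = p.resolve(part); // groupId segments become nested directories
        }
        return p.resolve(artifactId)
                .resolve(version)
                .resolve(artifactId + "-" + version + "." + packaging);
    }

    public static void main(String[] args) {
        System.out.println(artifactPath(Paths.get("android/mavenLocal"),
                "com.alibaba.idst", "nui-release", "1.0.0", "aar"));
        // -> android/mavenLocal/com/alibaba/idst/nui-release/1.0.0/nui-release-1.0.0.aar
    }
}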

android/settings.gradle (new file)

@@ -0,0 +1 @@
rootProject.name = 'ai_assistant_plugin'


@@ -0,0 +1,2 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
</manifest>


@@ -1,32 +1,32 @@
package com.example.ai_chat_assistant;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.plugin.common.MethodChannel;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.idst.nui.AsrResult;
import com.alibaba.idst.nui.Constants;
import com.alibaba.idst.nui.INativeNuiCallback;
import com.alibaba.idst.nui.INativeStreamInputTtsCallback;
import com.alibaba.idst.nui.KwsResult;
import com.alibaba.idst.nui.NativeNui;
package com.example.ai_assistant_plugin;
import java.util.Map;
public class MainActivity extends FlutterActivity implements INativeNuiCallback {
private static final String TTS_CHANNEL = "com.example.ai_chat_assistant/tts";
private static final String ASR_CHANNEL = "com.example.ai_chat_assistant/asr";
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.NonNull;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.idst.nui.Constants;
import com.alibaba.idst.nui.INativeStreamInputTtsCallback;
import com.alibaba.idst.nui.NativeNui;
/**
* AiAssistantPlugin
*/
public class AiAssistantPlugin implements FlutterPlugin, MethodCallHandler {
private static final String CHANNEL = "com.example.ai_chat_assistant/ali_sdk";
private static final String TAG = "AliyunSDK";
private static final String APP_KEY = "bXFFc1V65iYbW6EF";
private static final String ACCESS_KEY = "LTAI5t71JHxXRvt2mGuEVz9X";
@@ -37,6 +37,15 @@ public class MainActivity extends FlutterActivity implements INativeNuiCallback
private final NativeNui streamInputTtsInstance = new NativeNui(Constants.ModeType.MODE_STREAM_INPUT_TTS);
private final NativeNui asrInstance = new NativeNui();
/// The MethodChannel that handles the communication between Flutter and native Android
///
/// This local reference serves to register the plugin with the Flutter Engine and unregister it
/// when the Flutter Engine is detached from the Activity
private MethodChannel channel;
private Handler asrHandler;
private AsrCallBack asrCallBack;
private Handler handler = new Handler(Looper.getMainLooper());
private final AudioPlayer ttsAudioTrack = new AudioPlayer(new AudioPlayerCallback() {
@Override
public void playStart() {
@@ -53,137 +62,66 @@ public class MainActivity extends FlutterActivity implements INativeNuiCallback
}
});
private MethodChannel asrMethodChannel;
private final static int ASR_SAMPLE_RATE = 16000;
private final static int ASR_WAVE_FRAM_SIZE = 20 * 2 * 1 * ASR_SAMPLE_RATE / 1000; //20ms audio for 16k/16bit/mono
private AudioRecord asrAudioRecorder = null;
private boolean asrStopping = false;
private Handler asrHandler;
private String asrText = "";
@Override
public void configureFlutterEngine(FlutterEngine flutterEngine) {
super.configureFlutterEngine(flutterEngine);
new MethodChannel(flutterEngine.getDartExecutor().getBinaryMessenger(), TTS_CHANNEL)
.setMethodCallHandler((call, result) -> {
Map<String, Object> args = (Map<String, Object>) call.arguments;
switch (call.method) {
case "startTts":
Object isChinese = args.get("isChinese");
if (isChinese == null || isChinese.toString().isBlank()) {
return;
}
boolean isSuccess = startTts(Boolean.parseBoolean(isChinese.toString()));
result.success(isSuccess);
break;
case "sendTts":
Object textArg = args.get("text");
if (textArg == null || textArg.toString().isBlank()) {
return;
}
sendTts(textArg.toString());
break;
case "completeTts":
completeTts();
break;
case "stopTts":
stopTts();
break;
default:
result.notImplemented();
break;
}
});
asrMethodChannel = new MethodChannel(flutterEngine.getDartExecutor().getBinaryMessenger(), ASR_CHANNEL);
asrMethodChannel.setMethodCallHandler((call, result) -> {
switch (call.method) {
case "startAsr":
startAsr();
break;
case "stopAsr":
stopAsr();
break;
default:
result.notImplemented();
break;
}
});
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
public void onAttachedToEngine(FlutterPluginBinding flutterPluginBinding) {
channel = new MethodChannel(flutterPluginBinding.getBinaryMessenger(), CHANNEL);
channel.setMethodCallHandler(this);
HandlerThread asrHandlerThread = new HandlerThread("process_thread");
asrHandlerThread.start();
asrHandler = new Handler(asrHandlerThread.getLooper());
}
@Override
protected void onStart() {
Log.i(TAG, "onStart");
super.onStart();
asrInstance.initialize(this, genAsrInitParams(),
asrCallBack = new AsrCallBack(channel);
asrInstance.initialize(asrCallBack, genAsrInitParams(),
Constants.LogLevel.LOG_LEVEL_NONE, false);
}
@Override
protected void onStop() {
Log.i(TAG, "onStop");
super.onStop();
asrInstance.release();
}
@Override
protected void onDestroy() {
super.onDestroy();
public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
channel.setMethodCallHandler(null);
ttsAudioTrack.stop();
ttsAudioTrack.releaseAudioTrack();
streamInputTtsInstance.stopStreamInputTts();
streamInputTtsInstance.release();
// asrInstance.release();
// if (asrCallBack != null) {
// asrCallBack.release();
// asrCallBack = null;
// }
}
private boolean startTts(boolean isChinese) {
int ret = streamInputTtsInstance.startStreamInputTts(new INativeStreamInputTtsCallback() {
@Override
public void onStreamInputTtsEventCallback(INativeStreamInputTtsCallback.StreamInputTtsEvent event, String task_id, String session_id, int ret_code, String error_msg, String timestamp, String all_response) {
Log.i(TAG, "stream input tts event(" + event + ") session id(" + session_id + ") task id(" + task_id + ") retCode(" + ret_code + ") errMsg(" + error_msg + ")");
if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SYNTHESIS_STARTED) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SYNTHESIS_STARTED");
ttsAudioTrack.play();
Log.i(TAG, "start play");
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_SYNTHESIS) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_SYNTHESIS:" + timestamp);
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SYNTHESIS_COMPLETE || event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_TASK_FAILED) {
/*
 * Note: STREAM_INPUT_TTS_EVENT_SYNTHESIS_COMPLETE means TTS synthesis has finished and all
 * audio data has been delivered through the callback, not that the player has finished
 * playing all of it.
 */
Log.i(TAG, "play end");
// All data has been pushed; the player fires the playOver callback when playback ends.
ttsAudioTrack.isFinishSend(true);
if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_TASK_FAILED) {
Log.e(TAG, "STREAM_INPUT_TTS_EVENT_TASK_FAILED: " + "error_code(" + ret_code + ") error_message(" + error_msg + ")");
}
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_BEGIN) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_BEGIN:" + all_response);
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_END) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_END:" + all_response);
@Override
public void onMethodCall(MethodCall call, @NonNull Result result) {
Map<String, Object> args = call.arguments();
switch (call.method) {
case "startTts":
Object isChinese = args.get("isChinese");
if (isChinese == null || isChinese.toString().isBlank()) {
return;
}
}
@Override
public void onStreamInputTtsDataCallback(byte[] data) {
if (data.length > 0) {
ttsAudioTrack.setAudioData(data);
boolean isSuccess = startTts(Boolean.parseBoolean(isChinese.toString()));
result.success(isSuccess);
break;
case "sendTts":
Object textArg = args.get("text");
if (textArg == null || textArg.toString().isBlank()) {
return;
}
}
}, genTtsTicket(), genTtsParameters(isChinese), "", Constants.LogLevel.toInt(Constants.LogLevel.LOG_LEVEL_NONE), false);
if (Constants.NuiResultCode.SUCCESS != ret) {
Log.i(TAG, "start tts failed " + ret);
return false;
} else {
return true;
sendTts(textArg.toString());
break;
case "completeTts":
completeTts();
break;
case "stopTts":
stopTts();
break;
case "startAsr":
startAsr();
break;
case "stopAsr":
stopAsr();
break;
default:
result.notImplemented();
break;
}
}
@@ -201,7 +139,6 @@ public class MainActivity extends FlutterActivity implements INativeNuiCallback
}
private void startAsr() {
asrText = "";
asrHandler.post(() -> {
String setParamsString = genAsrParams();
Log.i(TAG, "nui set params " + setParamsString);
@@ -213,105 +150,12 @@ public class MainActivity extends FlutterActivity implements INativeNuiCallback
private void stopAsr() {
asrHandler.post(() -> {
asrStopping = true;
long ret = asrInstance.stopDialog();
runOnUiThread(() -> asrMethodChannel.invokeMethod("onAsrStop", null));
handler.post(() -> channel.invokeMethod("onAsrStop", null));
Log.i(TAG, "cancel dialog " + ret + " end");
});
}
@Override
public void onNuiEventCallback(Constants.NuiEvent event, final int resultCode,
final int arg2, KwsResult kwsResult,
AsrResult asrResult) {
Log.i(TAG, "event=" + event + " resultCode=" + resultCode);
if (event == Constants.NuiEvent.EVENT_TRANSCRIBER_STARTED) {
} else if (event == Constants.NuiEvent.EVENT_TRANSCRIBER_COMPLETE) {
asrStopping = false;
} else if (event == Constants.NuiEvent.EVENT_ASR_PARTIAL_RESULT) {
JSONObject jsonObject = JSON.parseObject(asrResult.allResponse);
JSONObject payload = jsonObject.getJSONObject("payload");
String result = payload.getString("result");
if (asrMethodChannel != null && result != null && !result.isBlank()) {
runOnUiThread(() -> asrMethodChannel.invokeMethod("onAsrResult", asrText + result));
}
} else if (event == Constants.NuiEvent.EVENT_SENTENCE_END) {
JSONObject jsonObject = JSON.parseObject(asrResult.allResponse);
JSONObject payload = jsonObject.getJSONObject("payload");
String result = payload.getString("result");
if (asrMethodChannel != null && result != null && !result.isBlank()) {
asrText += result;
runOnUiThread(() -> asrMethodChannel.invokeMethod("onAsrResult", asrText));
}
} else if (event == Constants.NuiEvent.EVENT_VAD_START) {
} else if (event == Constants.NuiEvent.EVENT_VAD_END) {
} else if (event == Constants.NuiEvent.EVENT_ASR_ERROR) {
asrStopping = false;
} else if (event == Constants.NuiEvent.EVENT_MIC_ERROR) {
asrStopping = false;
} else if (event == Constants.NuiEvent.EVENT_DIALOG_EX) { /* unused */
Log.i(TAG, "dialog extra message = " + asrResult.asrResult);
}
}
// After NativeNui's start() is called, this callback fires repeatedly for a while; the native layer supplies the buffer and tells us how many bytes it needs this time.
// The return value tells the native layer how many bytes were read; try to return exactly the requested length. Returning <= 0 signals an error.
@Override
public int onNuiNeedAudioData(byte[] buffer, int len) {
if (asrAudioRecorder == null) {
return -1;
}
if (asrAudioRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "audio recorder not init");
return -1;
}
return asrAudioRecorder.read(buffer, 0, len);
}
// Called when the recording state changes.
@Override
public void onNuiAudioStateChanged(Constants.AudioState state) {
Log.i(TAG, "onNuiAudioStateChanged");
if (state == Constants.AudioState.STATE_OPEN) {
Log.i(TAG, "audio recorder start");
asrAudioRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
ASR_SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
ASR_WAVE_FRAM_SIZE * 4);
asrAudioRecorder.startRecording();
Log.i(TAG, "audio recorder start done");
} else if (state == Constants.AudioState.STATE_CLOSE) {
Log.i(TAG, "audio recorder close");
if (asrAudioRecorder != null) {
asrAudioRecorder.release();
}
} else if (state == Constants.AudioState.STATE_PAUSE) {
Log.i(TAG, "audio recorder pause");
if (asrAudioRecorder != null) {
asrAudioRecorder.stop();
}
}
}
@Override
public void onNuiAudioRMSChanged(float val) {
// Log.i(TAG, "onNuiAudioRMSChanged vol " + val);
}
@Override
public void onNuiVprEventCallback(Constants.NuiVprEvent event) {
Log.i(TAG, "onNuiVprEventCallback event " + event);
}
@Override
public void onNuiLogTrackCallback(Constants.LogLevel level, String log) {
Log.i(TAG, "onNuiLogTrackCallback log level:" + level + ", message -> " + log);
}
private String genTtsTicket() {
String str = "";
try {
@@ -433,4 +277,51 @@ public class MainActivity extends FlutterActivity implements INativeNuiCallback
Log.i(TAG, "dialog params: " + params);
return params;
}
}
private boolean startTts(boolean isChinese) {
int ret = streamInputTtsInstance.startStreamInputTts(new INativeStreamInputTtsCallback() {
@Override
public void onStreamInputTtsEventCallback(INativeStreamInputTtsCallback.StreamInputTtsEvent event, String task_id, String session_id, int ret_code, String error_msg, String timestamp, String all_response) {
Log.i(TAG, "stream input tts event(" + event + ") session id(" + session_id + ") task id(" + task_id + ") retCode(" + ret_code + ") errMsg(" + error_msg + ")");
if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SYNTHESIS_STARTED) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SYNTHESIS_STARTED");
ttsAudioTrack.play();
Log.i(TAG, "start play");
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_SYNTHESIS) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_SYNTHESIS:" + timestamp);
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SYNTHESIS_COMPLETE || event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_TASK_FAILED) {
/*
 * Note: STREAM_INPUT_TTS_EVENT_SYNTHESIS_COMPLETE means TTS synthesis has finished and all
 * audio data has been delivered through the callback, not that the player has finished
 * playing all of it.
 */
Log.i(TAG, "play end");
// All data has been pushed; the player fires the playOver callback when playback ends.
ttsAudioTrack.isFinishSend(true);
if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_TASK_FAILED) {
Log.e(TAG, "STREAM_INPUT_TTS_EVENT_TASK_FAILED: " + "error_code(" + ret_code + ") error_message(" + error_msg + ")");
}
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_BEGIN) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_BEGIN:" + all_response);
} else if (event == StreamInputTtsEvent.STREAM_INPUT_TTS_EVENT_SENTENCE_END) {
Log.i(TAG, "STREAM_INPUT_TTS_EVENT_SENTENCE_END:" + all_response);
}
}
@Override
public void onStreamInputTtsDataCallback(byte[] data) {
if (data.length > 0) {
ttsAudioTrack.setAudioData(data);
}
}
}, genTtsTicket(), genTtsParameters(isChinese), "", Constants.LogLevel.toInt(Constants.LogLevel.LOG_LEVEL_NONE), false);
if (Constants.NuiResultCode.SUCCESS != ret) {
Log.i(TAG, "start tts failed " + ret);
return false;
} else {
return true;
}
}
}
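With MainActivity gone, AiAssistantPlugin now implements FlutterPlugin and must be attached to a FlutterEngine for onAttachedToEngine to run. A minimal sketch of manual registration, assuming an add-to-app host (HostActivity is hypothetical; a regular Flutter app registers plugins through GeneratedPluginRegistrant automatically):

import androidx.annotation.NonNull;
import io.flutter.embedding.android.FlutterActivity;
import io.flutter.embedding.engine.FlutterEngine;

public class HostActivity extends FlutterActivity {
    @Override
    public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
        super.configureFlutterEngine(flutterEngine);
        // Attaching the plugin triggers onAttachedToEngine, which creates the
        // "com.example.ai_chat_assistant/ali_sdk" MethodChannel shown above.
        flutterEngine.getPlugins().add(new AiAssistantPlugin());
    }
}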


@@ -0,0 +1,139 @@
package com.example.ai_assistant_plugin;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.idst.nui.AsrResult;
import com.alibaba.idst.nui.Constants;
import com.alibaba.idst.nui.INativeNuiCallback;
import com.alibaba.idst.nui.KwsResult;
import io.flutter.plugin.common.MethodChannel;
public class AsrCallBack implements INativeNuiCallback {
private static final String TAG = "AliAsr";
private final static int ASR_SAMPLE_RATE = 16000;
private final static int ASR_WAVE_FRAM_SIZE = 20 * 2 * 1 * ASR_SAMPLE_RATE / 1000; //20ms audio for 16k/16bit/mono
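// = 640 bytes per callback: 0.020 s * 16000 samples/s * 2 bytes/sample * 1 channel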
private AudioRecord asrAudioRecorder = null;
private MethodChannel channel;
private String asrText = "";
private Handler handler = new Handler(Looper.getMainLooper());
public AsrCallBack(MethodChannel channel) {
this.channel = channel;
}
@Override
public void onNuiEventCallback(Constants.NuiEvent event, final int resultCode,
final int arg2, KwsResult kwsResult,
AsrResult asrResult) {
Log.i(TAG, "event=" + event + " resultCode=" + resultCode);
if (event == Constants.NuiEvent.EVENT_TRANSCRIBER_STARTED) {
asrText = "";
} else if (event == Constants.NuiEvent.EVENT_TRANSCRIBER_COMPLETE) {
} else if (event == Constants.NuiEvent.EVENT_ASR_PARTIAL_RESULT) {
JSONObject jsonObject = JSON.parseObject(asrResult.allResponse);
JSONObject payload = jsonObject.getJSONObject("payload");
String result = payload.getString("result");
if (channel != null && result != null && !result.isBlank()) {
handler.post(() -> channel.invokeMethod("onAsrResult", asrText + result));
}
} else if (event == Constants.NuiEvent.EVENT_SENTENCE_END) {
JSONObject jsonObject = JSON.parseObject(asrResult.allResponse);
JSONObject payload = jsonObject.getJSONObject("payload");
String result = payload.getString("result");
if (channel != null && result != null && !result.isBlank()) {
asrText += result;
handler.post(() -> channel.invokeMethod("onAsrResult", asrText));
}
} else if (event == Constants.NuiEvent.EVENT_VAD_START) {
} else if (event == Constants.NuiEvent.EVENT_VAD_END) {
} else if (event == Constants.NuiEvent.EVENT_ASR_ERROR) {
} else if (event == Constants.NuiEvent.EVENT_MIC_ERROR) {
} else if (event == Constants.NuiEvent.EVENT_DIALOG_EX) { /* unused */
Log.i(TAG, "dialog extra message = " + asrResult.asrResult);
}
}
// After NativeNui's start() is called, this callback fires repeatedly for a while; the native layer supplies the buffer and tells us how many bytes it needs this time.
// The return value tells the native layer how many bytes were read; try to return exactly the requested length. Returning <= 0 signals an error.
@Override
public int onNuiNeedAudioData(byte[] buffer, int len) {
if (asrAudioRecorder == null) {
return -1;
}
if (asrAudioRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "audio recorder not init");
return -1;
}
return asrAudioRecorder.read(buffer, 0, len);
}
// Called when the recording state changes.
@Override
public void onNuiAudioStateChanged(Constants.AudioState state) {
Log.i(TAG, "onNuiAudioStateChanged");
if (state == Constants.AudioState.STATE_OPEN) {
Log.i(TAG, "audio recorder start");
asrAudioRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
ASR_SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
ASR_WAVE_FRAM_SIZE * 4);
asrAudioRecorder.startRecording();
Log.i(TAG, "audio recorder start done");
} else if (state == Constants.AudioState.STATE_CLOSE) {
Log.i(TAG, "audio recorder close");
if (asrAudioRecorder != null) {
asrAudioRecorder.release();
}
} else if (state == Constants.AudioState.STATE_PAUSE) {
Log.i(TAG, "audio recorder pause");
if (asrAudioRecorder != null) {
asrAudioRecorder.stop();
}
}
}
@Override
public void onNuiAudioRMSChanged(float val) {
// Log.i(TAG, "onNuiAudioRMSChanged vol " + val);
}
@Override
public void onNuiVprEventCallback(Constants.NuiVprEvent event) {
Log.i(TAG, "onNuiVprEventCallback event " + event);
}
@Override
public void onNuiLogTrackCallback(Constants.LogLevel level, String log) {
Log.i(TAG, "onNuiLogTrackCallback log level:" + level + ", message -> " + log);
}
public void release() {
// Release the audio recording resources
if (asrAudioRecorder != null) {
try {
asrAudioRecorder.stop();
} catch (Exception e) {
Log.e(TAG, "release error", e);
}
asrAudioRecorder.release();
asrAudioRecorder = null;
}
channel = null;
asrText = null;
}
}
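The text-accumulation contract above is subtle: EVENT_ASR_PARTIAL_RESULT reports the committed text plus the live partial for display only, while EVENT_SENTENCE_END commits the sentence permanently. A standalone Java sketch of just that logic (class and sample strings are illustrative only):

public class AsrTextAccumulationDemo {
    private static String asrText = "";

    // Mirrors EVENT_ASR_PARTIAL_RESULT: committed text plus the live partial.
    static String onPartial(String partial) {
        return asrText + partial;
    }

    // Mirrors EVENT_SENTENCE_END: the finished sentence is committed.
    static String onSentenceEnd(String sentence) {
        asrText += sentence;
        return asrText;
    }

    public static void main(String[] args) {
        System.out.println(onPartial("hel"));         // hel
        System.out.println(onPartial("hello"));       // hello
        System.out.println(onSentenceEnd("hello. ")); // hello.  (now committed)
        System.out.println(onPartial("wor"));         // hello. wor
    }
}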


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant;
package com.example.ai_assistant_plugin;
import android.media.AudioFormat;
import android.media.AudioManager;


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant;
package com.example.ai_assistant_plugin;
public interface AudioPlayerCallback {
public void playStart();


@@ -1,11 +1,10 @@
package com.example.ai_chat_assistant;
package com.example.ai_assistant_plugin;
import android.util.Log;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.example.ai_chat_assistant.token.AccessToken;
import com.example.ai_assistant_plugin.token.AccessToken;
import java.io.File;
import java.io.FileInputStream;


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant.token;
package com.example.ai_assistant_plugin.token;
import android.util.Log;


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant.token;
package com.example.ai_assistant_plugin.token;
import android.util.Log;


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant.token;
package com.example.ai_assistant_plugin.token;
/**
* Say something


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant.token;
package com.example.ai_assistant_plugin.token;
import android.util.Log;


@@ -1,4 +1,4 @@
package com.example.ai_chat_assistant.token;
package com.example.ai_assistant_plugin.token;
import android.util.Base64;