refactor: clean up migrations (#5187)
This commit is contained in:
parent
171b1e8c60
commit
a3e78dd563
@@ -34,7 +34,6 @@ tauri-plugin-store = "2"
|
||||
hyper = { version = "0.14", features = ["server"] }
|
||||
reqwest = { version = "0.11", features = ["json", "blocking", "stream"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
rocksdb = "0.21"
|
||||
rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", rev = "c1c4c9a0c9afbfbbf9eb42d6f8b00d8546fbdc2c", features = [
|
||||
"client",
|
||||
"transport-sse-client",
|
||||
|
||||
@@ -1,122 +0,0 @@
|
||||
use rocksdb::{IteratorMode, DB};
|
||||
use std::collections::HashMap;
|
||||
use tauri::Manager;
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_legacy_browser_data(app: tauri::AppHandle) -> Result<HashMap<String, String>, String> {
|
||||
let mut path = app.path().data_dir().unwrap();
|
||||
|
||||
let app_name =
|
||||
std::env::var("APP_NAME").unwrap_or_else(|_| app.config().product_name.clone().unwrap());
|
||||
path.push(app_name);
|
||||
path.push("Local Storage");
|
||||
path.push("leveldb");
|
||||
// Check if the path exists
|
||||
if !path.exists() {
|
||||
log::info!("Path {:?} does not exist, skipping migration.", path);
|
||||
return Ok(HashMap::new());
|
||||
}
|
||||
|
||||
let db = DB::open_default(path);
|
||||
match db {
|
||||
Ok(db) => {
|
||||
let iter = db.iterator(IteratorMode::Start);
|
||||
|
||||
let migration_kvs: HashMap<String, String> = HashMap::from([
|
||||
// Api Server
|
||||
(
|
||||
"_file://\0\u{1}apiServerHost".to_string(),
|
||||
"serverHost".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}apiServerPort".to_string(),
|
||||
"serverPort".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}apiServerCorsEnabled".to_string(),
|
||||
"corsEnabled".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}apiServerPrefix".to_string(),
|
||||
"apiPrefix".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}apiServerVerboseLogEnabled".to_string(),
|
||||
"verboseLogs".to_string(),
|
||||
),
|
||||
// Proxy
|
||||
(
|
||||
"_file://\0\u{1}proxyFeatureEnabled".to_string(),
|
||||
"proxyEnabled".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}httpsProxyFeature".to_string(),
|
||||
"proxyUrl".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}proxyPassword".to_string(),
|
||||
"proxyPassword".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}proxyUsername".to_string(),
|
||||
"proxyUsername".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}ignoreSSLFeature".to_string(),
|
||||
"proxyIgnoreSSL".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}verifyProxySSL".to_string(),
|
||||
"verifyProxySSL".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}verifyProxyHostSSL".to_string(),
|
||||
"verifyProxyHostSSL".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}verifyPeerSSL".to_string(),
|
||||
"verifyPeerSSL".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}verifyHostSSL".to_string(),
|
||||
"verifyHostSSL".to_string(),
|
||||
),
|
||||
("_file://\0\u{1}noProxy".to_string(), "noProxy".to_string()),
|
||||
// Analytics
|
||||
(
|
||||
"_file://\0\u{1}productAnalytic".to_string(),
|
||||
"productAnalytic".to_string(),
|
||||
),
|
||||
(
|
||||
"_file://\0\u{1}productAnalyticPrompt".to_string(),
|
||||
"productAnalyticPrompt".to_string(),
|
||||
),
|
||||
]);
|
||||
|
||||
let mut results = HashMap::new();
|
||||
|
||||
for item in iter {
|
||||
match item {
|
||||
Ok((key, value)) => {
|
||||
let key_str = String::from_utf8_lossy(&key).to_string();
|
||||
let value_str = String::from_utf8_lossy(&value).to_string();
|
||||
// log::info!("Key: {:?} | Value: {:?}", key_str, value_str);
|
||||
if let Some(new_key) = migration_kvs.get(&key_str) {
|
||||
log::info!("Migrating key {:?} to new key {:?}", key_str, new_key);
|
||||
|
||||
results.insert(new_key.to_string(), value_str.replace("\u{1}", ""));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error reading from DB: {:?}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
log::info!("Migration results: {:?}", results);
|
||||
Ok(results)
|
||||
}
|
||||
Err(e) => {
|
||||
return Ok(HashMap::new());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2,7 +2,6 @@ pub mod cmd;
|
||||
pub mod fs;
|
||||
pub mod hardware;
|
||||
pub mod mcp;
|
||||
pub mod migration;
|
||||
pub mod server;
|
||||
pub mod setup;
|
||||
pub mod state;
|
||||
|
||||
@ -22,7 +22,8 @@ pub fn run() {
|
||||
// when defining deep link schemes at runtime, you must also check `argv` here
|
||||
}));
|
||||
}
|
||||
builder.plugin(tauri_plugin_os::init())
|
||||
builder
|
||||
.plugin(tauri_plugin_os::init())
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_dialog::init())
|
||||
.plugin(tauri_plugin_opener::init())
|
||||
@ -57,7 +58,6 @@ pub fn run() {
|
||||
core::cmd::read_logs,
|
||||
core::cmd::change_app_data_folder,
|
||||
core::cmd::reset_cortex_restart_count,
|
||||
core::migration::get_legacy_browser_data,
|
||||
// MCP commands
|
||||
core::mcp::get_tools,
|
||||
core::mcp::call_tool,
|
||||
|
||||
@ -1,15 +1,10 @@
|
||||
import { useProductAnalytic } from '@/hooks/useAnalytic'
|
||||
import { useLocalApiServer } from '@/hooks/useLocalApiServer'
|
||||
import { useModelProvider } from '@/hooks/useModelProvider'
|
||||
import { useProxyConfig } from '@/hooks/useProxyConfig'
|
||||
import { ExtensionManager } from '@/lib/extension'
|
||||
import { configurePullOptions } from '@/services/models'
|
||||
import {
|
||||
EngineManagementExtension,
|
||||
Engines,
|
||||
ExtensionTypeEnum,
|
||||
} from '@janhq/core'
|
||||
import { invoke } from '@tauri-apps/api/core'
|
||||
|
||||
/**
|
||||
* Migrates legacy browser data to new browser session.
|
||||
@ -36,32 +31,7 @@ export const migrateData = async () => {
|
||||
checkExtensionManager()
|
||||
})
|
||||
try {
|
||||
// Migrate local storage data
|
||||
const oldData = await invoke('get_legacy_browser_data')
|
||||
for (const [key, value] of Object.entries(
|
||||
oldData as unknown as Record<string, string>
|
||||
)) {
|
||||
if (value !== null && value !== undefined) {
|
||||
if (Object.keys(useLocalApiServer.getState()).includes(key)) {
|
||||
useLocalApiServer.setState({
|
||||
...useLocalApiServer.getState(),
|
||||
[key]: value.replace(/"/g, ''),
|
||||
})
|
||||
} else if (Object.keys(useProxyConfig.getState()).includes(key)) {
|
||||
useProxyConfig.setState({
|
||||
...useProxyConfig.getState(),
|
||||
[key]: value.replace(/"/g, ''),
|
||||
})
|
||||
} else if (Object.keys(useProductAnalytic.getState()).includes(key)) {
|
||||
useProductAnalytic.setState({
|
||||
...useProductAnalytic.getState(),
|
||||
[key]: value.replace(/"/g, ''),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
// Migrate provider configurations
|
||||
|
||||
if (engines) {
|
||||
for (const [key, value] of Object.entries(engines)) {
|
||||
const providerName = key.replace('google_gemini', 'gemini')
|
||||
@ -91,7 +61,6 @@ export const migrateData = async () => {
|
||||
}
|
||||
}
|
||||
localStorage.setItem('migration_completed', 'true')
|
||||
configurePullOptions(useProxyConfig.getState())
|
||||
} catch (error) {
|
||||
console.error('Migration failed:', error)
|
||||
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user