Browse Source

WIP: import/export

develop-refactor
chodak166 5 months ago
parent
commit
091c317266
  1. 7
      app/src/app.rs
  2. 7
      app/src/config.rs
  3. 16
      app/src/container.rs
  4. 10
      app/src/main.rs
  5. 12
      example_dict.json
  6. 2
      lib/Cargo.toml
  7. 8
      lib/migrations/001_initial.sql
  8. 6
      lib/migrations/002_add_updated_at.sql
  9. 6
      lib/src/application/config.rs
  10. 2
      lib/src/application/errors.rs
  11. 1
      lib/src/application/services.rs
  12. 3
      lib/src/application/traits.rs
  13. 32
      lib/src/core/sys_major/encoder_tests.rs
  14. 2
      lib/src/infrastructure.rs
  15. 78
      lib/src/infrastructure/json_file_dict_source.rs
  16. 314
      lib/src/infrastructure/sqlite_dict_repository.rs
  17. 2
      lib/src/lib.rs
  18. 5
      lib/src/presentation/cli/cli_args.rs
  19. 1
      lib/src/presentation/cli/commands.rs
  20. 34
      lib/src/presentation/cli/commands/import_dict.rs
  21. 7
      lib/src/presentation/cli/defaults.rs

7
app/src/app.rs

@ -41,6 +41,13 @@ impl Application {
Command::Encode(_) => {
commands::encode::run(self.config.encoder).await;
}
Command::ImportDict(_) => {
commands::import_dict::run(
self.config.import_dict,
self.container.dict_repository.clone(),
)
.await?;
}
}
Ok(())
}

7
app/src/config.rs

@ -3,12 +3,13 @@ use config::{Config, Environment, File};
use serde::Deserialize;
use applib::cli::{Command, GlobalArgs, defaults::set_defaults};
use applib::config::{EncoderConfig, ServerConfig};
use applib::config::{EncoderConfig, ImportDictConfig, ServerConfig};
#[derive(Debug, Deserialize, Clone)]
pub struct AppConfig {
pub server: ServerConfig,
pub encoder: EncoderConfig,
pub import_dict: ImportDictConfig,
pub log_level: String,
}
@ -45,6 +46,10 @@ impl AppConfig {
}
builder = builder.set_override("encoder.input", cmd_args.input.clone())?;
}
Command::ImportDict(cmd_args) => {
builder = builder.set_override("import_dict.name", cmd_args.name.clone())?;
builder = builder.set_override("import_dict.path", cmd_args.path.clone())?;
}
}
builder

16
app/src/container.rs

@ -1,10 +1,20 @@
use crate::config::AppConfig;
use applib::application::services::DictImporter;
use applib::infrastructure::sqlite_dict_repository::SqliteDictRepository;
#[derive(Clone)]
pub struct Container {}
pub struct Container {
pub dict_repository: SqliteDictRepository,
}
impl Container {
pub async fn new(_: &AppConfig) -> anyhow::Result<Self> {
Ok(Self {})
pub async fn new(config: &AppConfig) -> anyhow::Result<Self> {
let dict_repository = SqliteDictRepository::new("sqlite:app.db").await?;
Ok(Self { dict_repository })
}
pub fn create_dict_importer(&self) -> DictImporter<SqliteDictRepository> {
DictImporter::new(&self.dict_repository)
}
}

10
app/src/main.rs

@ -11,13 +11,3 @@ async fn main() -> Result<()> {
app.run().await?;
Ok(())
}
// use applib::Greeter;
// use applib::infrastructure::std_greeting_printer::StdGreetingPrinter;
// use std::sync::Arc;
// fn main() {
// let printer = Arc::new(StdGreetingPrinter::new());
// let greeter = Greeter::new(printer);
// greeter.say_hello("John");
// }

12
example_dict.json

@ -0,0 +1,12 @@
[
{"word": "hello", "metadata": {"type": "greeting", "language": "english"}},
{"word": "world", "metadata": {"type": "noun", "language": "english"}},
{"word": "rust", "metadata": {"type": "programming_language", "paradigm": "systems"}},
{"word": "programming", "metadata": {"type": "verb", "context": "computing"}},
{"word": "database", "metadata": {"type": "noun", "context": "data_storage"}},
{"word": "sqlite", "metadata": {"type": "database_engine", "features": ["embedded", "sql"]}},
{"word": "json", "metadata": {"type": "data_format", "standard": "RFC 8259"}},
{"word": "import", "metadata": {"type": "verb", "context": "data_operations"}},
{"word": "dictionary", "metadata": {"type": "noun", "context": "reference"}},
{"word": "example", "metadata": {"type": "noun", "usage": "demonstration"}}
]

2
lib/Cargo.toml

@ -12,7 +12,9 @@ tracing = "0.1"
tokio = { version = "1.48", features = ["full"] }
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
thiserror = "1.0"
async-trait = "0.1"
parking_lot = "0.12"
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "sqlite", "chrono", "migrate"] }

8
lib/migrations/001_initial.sql

@ -0,0 +1,8 @@
-- Create dicts table
-- Registry of dictionaries: one row per imported dict, keyed by its name.
CREATE TABLE IF NOT EXISTS dicts (
    name TEXT PRIMARY KEY,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);

-- Note: Individual dict entry tables will be created dynamically when dicts are created
-- This is because each dict needs its own table with the dict name in the table name
-- (see the repository's dynamic `dict_entries_<name>` DDL).

6
lib/migrations/002_add_updated_at.sql

@ -0,0 +1,6 @@
-- This migration documents the addition of the updated_at column to dict entry tables.
-- Since dict entry tables are created dynamically, a static migration cannot alter them;
-- this file is intentionally a no-op placeholder — newly created dict_entries_* tables
-- already include the updated_at column in their CREATE TABLE statement.
-- Note: In a production environment, existing dict_entries_* tables would need to be
-- inspected individually and ALTERed to add the column where it is missing.

6
lib/src/application/config.rs

@ -11,3 +11,9 @@ pub struct EncoderConfig {
pub system: System,
pub input: String,
}
#[derive(Debug, Deserialize, Clone)]
pub struct ImportDictConfig {
pub name: String,
pub path: String,
}

2
lib/src/application/errors.rs

@ -11,3 +11,5 @@ impl std::fmt::Display for RepositoryError {
write!(f, "{:?}", self)
}
}
impl std::error::Error for RepositoryError {}

1
lib/src/application/services.rs

@ -1,5 +1,4 @@
use crate::application::traits::{DictRepository, DictSource};
use crate::core::entities::{Dict, DictEntry};
pub struct DictImporter<'a, R> {
repo: &'a R,

3
lib/src/application/traits.rs

@ -14,6 +14,9 @@ pub trait DictRepository {
limit: Option<u32>,
offset: Option<u32>,
) -> Result<Dict, RepositoryError>;
// Get the next available ID for a dictionary
fn get_next_id(&self, dict_name: &str) -> Result<u32, RepositoryError>;
}
pub trait DictSource {

32
lib/src/core/sys_major/encoder_tests.rs

@ -5,7 +5,7 @@ use crate::core::traits::SystemEncoder;
mod tests {
use super::*;
fn create_single_dict() -> Rules {
fn create_single_rules() -> Rules {
vec![Rule {
phoneme_in: "B".to_string(),
phoneme_out: "2".to_string(),
@ -16,7 +16,7 @@ mod tests {
}]
}
fn create_single_dict_min() -> Rules {
fn create_single_rules_min() -> Rules {
vec![Rule {
phoneme_in: "B".to_string(),
phoneme_out: "2".to_string(),
@ -24,7 +24,7 @@ mod tests {
}]
}
fn create_double_dict() -> Rules {
fn create_double_rules() -> Rules {
vec![
Rule {
phoneme_in: "CD".to_string(),
@ -44,90 +44,90 @@ mod tests {
#[test]
fn test_single_symbol_encoding_only_before_only_after_matched() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("ABC");
assert_eq!(output, "2")
}
#[test]
fn test_double_symbol_encoding_only_before_only_after_matched() {
let encoder = Encoder::new(create_double_dict());
let encoder = Encoder::new(create_double_rules());
let output = encoder.encode("ABCDEF");
assert_eq!(output, "2")
}
#[test]
fn test_single_symbol_encoding_only_before_not_matched_with_other() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("DBC");
assert_eq!(output, "")
}
#[test]
fn test_double_symbol_encoding_only_before_not_matched_with_other() {
let encoder = Encoder::new(create_double_dict());
let encoder = Encoder::new(create_double_rules());
let output = encoder.encode("AACDEE");
assert_eq!(output, "")
}
#[test]
fn test_case_insensitivity() {
let encoder = Encoder::new(create_double_dict());
let encoder = Encoder::new(create_double_rules());
let output = encoder.encode("abcdef");
assert_eq!(output, "2")
}
#[test]
fn test_single_symbol_encoding_only_before_not_matched_with_empty() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("BC");
assert_eq!(output, "")
}
#[test]
fn test_single_symbol_encoding_only_before_not_matched_with_not_before() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("XBC");
assert_eq!(output, "")
}
#[test]
fn test_single_symbol_encoding_only_after_not_matched_with_other() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("ABD");
assert_eq!(output, "")
}
#[test]
fn test_single_symbol_encoding_only_after_not_matched_with_empty() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("AB");
assert_eq!(output, "")
}
#[test]
fn test_single_symbol_encoding_only_after_not_matched_with_not_after() {
let encoder = Encoder::new(create_single_dict());
let encoder = Encoder::new(create_single_rules());
let output = encoder.encode("ABY");
assert_eq!(output, "")
}
#[test]
fn test_single_symbol_encoding_empty_before_after_matched_with_empty() {
let encoder = Encoder::new(create_single_dict_min());
let encoder = Encoder::new(create_single_rules_min());
let output = encoder.encode("B");
assert_eq!(output, "2")
}
#[test]
fn test_single_symbol_encoding_empty_before_after_matched_with_others() {
let encoder = Encoder::new(create_single_dict_min());
let encoder = Encoder::new(create_single_rules_min());
let output = encoder.encode("AXBYC");
assert_eq!(output, "2")
}
#[test]
fn test_encoding_multiple_phonemes() {
let encoder = Encoder::new(create_double_dict());
let encoder = Encoder::new(create_double_rules());
let output = encoder.encode("VvmNabCd33mn00CD22cdefmn");
assert_eq!(output, "32323")
}

2
lib/src/infrastructure.rs

@ -1 +1,3 @@
pub mod errors;
pub mod json_file_dict_source;
pub mod sqlite_dict_repository;

78
lib/src/infrastructure/json_file_dict_source.rs

@ -1,6 +1,7 @@
use crate::application::ports::DictSource;
use crate::core::entities::{DictEntry, DictEntryId};
use crate::application::traits::DictSource;
use crate::core::entities::DictEntry;
use serde::Deserialize;
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::path::Path;
@ -9,37 +10,76 @@ use std::path::Path;
// It exists ONLY here to map external JSON names to internal Entity names.
#[derive(Deserialize)]
struct JsonEntry {
id: u32,
word: String, // "word" in JSON, "text" in Entity
// If JSON has extra fields we don't care about, Serde ignores them.
word: String,
metadata: Option<HashMap<String, serde_json::Value>>,
}
pub struct JsonFileDictSource {
// Helper iterator from Serde
iter:
serde_json::StreamDeserializer<'static, serde_json::de::IoRead<BufReader<File>>, JsonEntry>,
entries: Vec<DictEntry>,
current_index: usize,
next_id: u32,
}
impl JsonFileDictSource {
pub fn new<P: AsRef<Path>>(path: P) -> anyhow::Result<Self> {
let file = File::open(path)?;
let reader = BufReader::new(file);
let iter = serde_json::Deserializer::from_reader(reader).into_iter::<JsonEntry>();
Ok(Self { iter })
// Parse as JSON array
let json_entries: Vec<JsonEntry> = serde_json::from_reader(reader)?;
// Convert to DictEntry with auto-generated IDs
let mut entries = Vec::new();
for (index, json_entry) in json_entries.into_iter().enumerate() {
let id = (index + 1) as u32; // Auto-generate ID starting from 1
// Convert metadata from serde_json::Value to HashMap<String, String>
let metadata = if let Some(meta) = json_entry.metadata {
meta.into_iter()
.map(|(k, v)| {
(
k,
match v {
serde_json::Value::String(s) => s,
_ => v.to_string(),
},
)
})
.collect()
} else {
HashMap::new()
};
entries.push(DictEntry {
id,
text: json_entry.word,
metadata,
});
}
let entries_len = entries.len();
Ok(Self {
entries,
current_index: 0,
next_id: (entries_len + 1) as u32,
})
}
pub fn new_with_existing_ids<P: AsRef<Path>>(path: P, start_id: u32) -> anyhow::Result<Self> {
let mut source = Self::new(path)?;
source.next_id = start_id;
Ok(source)
}
}
impl DictSource for JsonFileDictSource {
fn next_entry(&mut self) -> Option<Result<DictEntry, anyhow::Error>> {
self.iter.next().map(|res| {
match res {
Ok(json) => {
// MAPPING HAPPENS HERE.
// This is type-safe. If DictEntry::new signature changes, this breaks.
Ok(DictEntry::new(json.id as DictEntryId, json.word))
}
Err(e) => Err(anyhow::Error::new(e)),
if self.current_index < self.entries.len() {
let entry = self.entries[self.current_index].clone();
self.current_index += 1;
Some(Ok(entry))
} else {
None
}
})
}
}

314
lib/src/infrastructure/sqlite_dict_repository.rs

@ -0,0 +1,314 @@
use crate::application::errors::RepositoryError;
use crate::application::traits::DictRepository;
use crate::core::entities::{Dict, DictEntry, DictEntryId};
use sqlx::{Row, SqlitePool, sqlite::SqliteConnectOptions};
use std::collections::HashMap;
use std::str::FromStr;
/// SQLite-backed implementation of `DictRepository`.
///
/// Wraps an `SqlitePool`; `Clone` is cheap because the pool is internally
/// reference-counted, so clones share the same connection pool.
#[derive(Clone)]
pub struct SqliteDictRepository {
    // Shared connection pool; all queries in this repository go through it.
    pool: SqlitePool,
}
impl SqliteDictRepository {
    /// Opens (creating if missing) the SQLite database at `database_url`
    /// and runs the embedded migrations.
    ///
    /// # Errors
    /// `RepositoryError::ConnectionFailed` when the URL is invalid or the
    /// pool cannot connect; `RepositoryError::Unexpected` when a migration
    /// fails (the migration error text is preserved in the message).
    pub async fn new(database_url: &str) -> Result<Self, RepositoryError> {
        let options = SqliteConnectOptions::from_str(database_url)
            .map_err(|_| RepositoryError::ConnectionFailed)?
            .create_if_missing(true);

        let pool = SqlitePool::connect_with(options)
            .await
            .map_err(|_| RepositoryError::ConnectionFailed)?;

        // Embedded migrations keep the on-disk schema in sync with this crate.
        sqlx::migrate!("./migrations")
            .run(&pool)
            .await
            .map_err(|e| RepositoryError::Unexpected(format!("Failed to run migrations: {}", e)))?;

        Ok(Self { pool })
    }

    /// Builds the per-dict entries table name, rejecting unsafe input.
    ///
    /// Per-dict tables are created dynamically (`dict_entries_<name>`), so the
    /// dict name is spliced into SQL text; restricting it to ASCII
    /// alphanumerics and '_' prevents SQL injection through the identifier.
    fn checked_table_name(dict_name: &str) -> Result<String, RepositoryError> {
        let is_safe = !dict_name.is_empty()
            && dict_name
                .chars()
                .all(|c| c.is_ascii_alphanumeric() || c == '_');
        if is_safe {
            Ok(format!("dict_entries_{}", dict_name))
        } else {
            Err(RepositoryError::InvalidData(format!(
                "invalid dict name {:?}: only ASCII alphanumerics and '_' are allowed",
                dict_name
            )))
        }
    }

    /// Ensures the `dicts` registry row and the per-dict entries table exist.
    ///
    /// NOTE(review): not called anywhere in this file — `create()` performs
    /// the same work transactionally; consider delegating to avoid the two
    /// copies of this DDL drifting apart.
    async fn ensure_dict_tables(&self, dict_name: &str) -> Result<(), RepositoryError> {
        // Create the registry table if this is a fresh database.
        sqlx::query(
            r#"
            CREATE TABLE IF NOT EXISTS dicts (
                name TEXT PRIMARY KEY,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP
            )
            "#,
        )
        .execute(&self.pool)
        .await
        .map_err(|_| RepositoryError::ConnectionFailed)?;

        // Register the dict; no-op when it already exists.
        sqlx::query("INSERT OR IGNORE INTO dicts (name) VALUES (?)")
            .bind(dict_name)
            .execute(&self.pool)
            .await
            .map_err(|_| RepositoryError::ConnectionFailed)?;

        // Each dict gets its own entries table, hence the dynamic DDL.
        let table_name = Self::checked_table_name(dict_name)?;
        let create_table_sql = format!(
            r#"
            CREATE TABLE IF NOT EXISTS {} (
                id INTEGER PRIMARY KEY,
                text TEXT NOT NULL UNIQUE,
                metadata TEXT,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
            )
            "#,
            table_name
        );

        sqlx::query(&create_table_sql)
            .execute(&self.pool)
            .await
            .map_err(|_| RepositoryError::ConnectionFailed)?;

        Ok(())
    }

    /// Returns the next free entry id for the dict: `MAX(id) + 1`, or 1 when
    /// the table is empty.
    ///
    /// NOTE(review): `block_in_place` + `block_on` bridges this sync API to
    /// async sqlx; it panics on a current-thread Tokio runtime — confirm the
    /// application always uses the multi-thread runtime.
    pub fn get_next_id(&self, dict_name: &str) -> Result<u32, RepositoryError> {
        let pool = self.pool.clone();
        let table_name = Self::checked_table_name(dict_name)?;

        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                // MAX(id) always yields exactly one row; the value is NULL
                // (hence Option<i64>) when the table has no entries.
                let result: Option<i64> =
                    sqlx::query_scalar(&format!("SELECT MAX(id) FROM {}", table_name))
                        .fetch_one(&pool)
                        .await
                        .map_err(|e| {
                            RepositoryError::Unexpected(format!("Failed to query max id: {}", e))
                        })?;

                Ok(result.map(|id| id as u32 + 1).unwrap_or(1))
            })
        })
    }

    /// Looks up the id of the entry whose `text` matches exactly.
    ///
    /// Returns `Ok(None)` when no such entry exists. Fixed: the previous
    /// version used `fetch_one`, which reports "no rows" as an error instead
    /// of producing `None`.
    fn find_id_by_text(&self, dict_name: &str, text: &str) -> Result<Option<u32>, RepositoryError> {
        let pool = self.pool.clone();
        let table_name = Self::checked_table_name(dict_name)?;
        let text = text.to_string();

        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                let result: Option<i64> =
                    sqlx::query_scalar(&format!("SELECT id FROM {} WHERE text = ?", table_name))
                        .bind(&text)
                        .fetch_optional(&pool)
                        .await
                        .map_err(|e| {
                            RepositoryError::Unexpected(format!("Failed to look up entry: {}", e))
                        })?;

                Ok(result.map(|id| id as u32))
            })
        })
    }
}
impl DictRepository for SqliteDictRepository {
    /// Registers the dict in the `dicts` table and creates its entries table.
    ///
    /// Both DDL statements and the registry insert run in one transaction, so
    /// a half-created dict is never left behind.
    ///
    /// NOTE(review): `block_in_place` + `block_on` bridges this sync trait to
    /// async sqlx; it panics on a current-thread Tokio runtime.
    fn create(&self, name: &str) -> Result<(), RepositoryError> {
        // Dict names are spliced into table identifiers; restrict them to a
        // safe alphabet to prevent SQL injection through the name.
        if name.is_empty() || !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
            return Err(RepositoryError::InvalidData(format!(
                "invalid dict name {:?}: only ASCII alphanumerics and '_' are allowed",
                name
            )));
        }

        let pool = self.pool.clone();
        let name = name.to_string();

        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                let mut tx = pool.begin().await.map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to begin transaction: {}", e))
                })?;

                // Registry table (idempotent; also covered by migrations).
                sqlx::query(
                    r#"
                    CREATE TABLE IF NOT EXISTS dicts (
                        name TEXT PRIMARY KEY,
                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                    "#,
                )
                .execute(&mut *tx)
                .await
                .map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to create dicts table: {}", e))
                })?;

                // Register the dict; no-op when it already exists.
                sqlx::query("INSERT OR IGNORE INTO dicts (name) VALUES (?)")
                    .bind(&name)
                    .execute(&mut *tx)
                    .await
                    .map_err(|e| {
                        RepositoryError::Unexpected(format!("Failed to insert dict: {}", e))
                    })?;

                // Each dict gets its own entries table, hence the dynamic DDL.
                let table_name = format!("dict_entries_{}", name);
                let create_table_sql = format!(
                    r#"
                    CREATE TABLE IF NOT EXISTS {} (
                        id INTEGER PRIMARY KEY,
                        text TEXT NOT NULL UNIQUE,
                        metadata TEXT,
                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                    "#,
                    table_name
                );

                sqlx::query(&create_table_sql)
                    .execute(&mut *tx)
                    .await
                    .map_err(|e| {
                        RepositoryError::Unexpected(format!("Failed to create entries table: {}", e))
                    })?;

                tx.commit().await.map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to commit transaction: {}", e))
                })?;

                Ok(())
            })
        })
    }

    /// Inserts or updates `entries` in the dict's entry table.
    ///
    /// Entries are matched on `text` (UNIQUE): an existing row keeps its id
    /// and receives fresh metadata plus a bumped `updated_at`; a new row is
    /// inserted with the id carried by the entry. The whole batch runs in one
    /// transaction, and the previous racy SELECT-then-INSERT/UPDATE pair is
    /// replaced by a single atomic UPSERT per entry.
    fn save_entries(&self, dict_name: &str, entries: &[DictEntry]) -> Result<(), RepositoryError> {
        if dict_name.is_empty()
            || !dict_name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
        {
            return Err(RepositoryError::InvalidData(format!(
                "invalid dict name {:?}: only ASCII alphanumerics and '_' are allowed",
                dict_name
            )));
        }

        let pool = self.pool.clone();
        let dict_name = dict_name.to_string();
        let entries = entries.to_vec();

        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                let table_name = format!("dict_entries_{}", dict_name);
                let upsert_sql = format!(
                    "INSERT INTO {} (id, text, metadata) VALUES (?, ?, ?) \
                     ON CONFLICT(text) DO UPDATE SET metadata = excluded.metadata, \
                     updated_at = CURRENT_TIMESTAMP",
                    table_name
                );

                let mut tx = pool.begin().await.map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to begin transaction: {}", e))
                })?;

                for entry in entries {
                    // Metadata is stored as a JSON blob in a TEXT column.
                    let metadata_json = serde_json::to_string(&entry.metadata)
                        .map_err(|e| RepositoryError::InvalidData(e.to_string()))?;

                    sqlx::query(&upsert_sql)
                        .bind(entry.id as i64)
                        .bind(&entry.text)
                        .bind(metadata_json)
                        .execute(&mut *tx)
                        .await
                        .map_err(|e| {
                            RepositoryError::Unexpected(format!("Failed to save entry: {}", e))
                        })?;
                }

                tx.commit().await.map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to commit transaction: {}", e))
                })?;

                Ok(())
            })
        })
    }

    /// Trait-level accessor; delegates to the inherent `get_next_id`.
    ///
    /// Inherent methods take precedence over trait methods in Rust's method
    /// resolution, so this call is NOT recursive — it hits the `impl
    /// SqliteDictRepository` version.
    fn get_next_id(&self, dict_name: &str) -> Result<u32, RepositoryError> {
        self.get_next_id(dict_name)
    }

    /// Fetches entries of dict `name`, optionally paginated.
    ///
    /// # Errors
    /// `RepositoryError::NotFound` when the dict is not registered;
    /// `RepositoryError::InvalidData` for unsafe names or corrupt metadata.
    fn fetch_many(
        &self,
        name: &str,
        limit: Option<u32>,
        offset: Option<u32>,
    ) -> Result<Dict, RepositoryError> {
        if name.is_empty() || !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
            return Err(RepositoryError::InvalidData(format!(
                "invalid dict name {:?}: only ASCII alphanumerics and '_' are allowed",
                name
            )));
        }

        let pool = self.pool.clone();
        let name = name.to_string();

        tokio::task::block_in_place(|| {
            tokio::runtime::Handle::current().block_on(async move {
                let table_name = format!("dict_entries_{}", name);

                // Distinguish "dict does not exist" from "dict is empty".
                let dict_exists: bool =
                    sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM dicts WHERE name = ?)")
                        .bind(&name)
                        .fetch_one(&pool)
                        .await
                        .map_err(|e| {
                            RepositoryError::Unexpected(format!(
                                "Failed to check dict exists: {}",
                                e
                            ))
                        })?;

                if !dict_exists {
                    return Err(RepositoryError::NotFound);
                }

                let mut query = format!("SELECT id, text, metadata FROM {}", table_name);
                // SQLite requires LIMIT before OFFSET; a negative LIMIT means
                // "unbounded", so an offset-only request no longer silently
                // caps the result at 1000 rows (previous behavior).
                if limit.is_some() || offset.is_some() {
                    let limit_val = limit.map(|l| i64::from(l)).unwrap_or(-1);
                    query.push_str(&format!(" LIMIT {}", limit_val));
                    if let Some(offset_val) = offset {
                        query.push_str(&format!(" OFFSET {}", offset_val));
                    }
                }

                let rows = sqlx::query(&query).fetch_all(&pool).await.map_err(|e| {
                    RepositoryError::Unexpected(format!("Failed to fetch entries: {}", e))
                })?;

                let mut entries = HashMap::new();
                for row in rows {
                    let id: i64 = row.get("id");
                    let text: String = row.get("text");
                    let metadata_json: Option<String> = row.get("metadata");

                    // NULL metadata maps to an empty map rather than an error.
                    let metadata = if let Some(json) = metadata_json {
                        serde_json::from_str(&json)
                            .map_err(|e| RepositoryError::InvalidData(e.to_string()))?
                    } else {
                        HashMap::new()
                    };

                    let entry = DictEntry {
                        id: id as DictEntryId,
                        text,
                        metadata,
                    };
                    entries.insert(entry.id, entry);
                }

                Ok(Dict { name, entries })
            })
        })
    }
}

2
lib/src/lib.rs

@ -1,4 +1,4 @@
mod application;
pub mod application;
mod core;
pub mod infrastructure;
mod presentation;

5
lib/src/presentation/cli/cli_args.rs

@ -29,6 +29,9 @@ pub enum Command {
/// Encode a word using given system
Encode(EncodeArgs),
/// Import dictionary
ImportDict(ImportDictArgs),
}
#[derive(ClapArgs, Debug, Clone)]
@ -49,7 +52,7 @@ pub struct EncodeArgs {
#[derive(ClapArgs, Debug, Clone)]
pub struct ImportDictArgs {
#[arg(long, help = defaults::HELP_IMPORT_DICT_NAME)]
pub name: Option<String>,
pub name: String,
#[arg(long, help = defaults::HELP_IMPORT_DICT_INPUT)]
pub path: String,

1
lib/src/presentation/cli/commands.rs

@ -1,2 +1,3 @@
pub mod encode;
pub mod import_dict;
pub mod server;

34
lib/src/presentation/cli/commands/import_dict.rs

@ -0,0 +1,34 @@
use crate::application::traits::DictRepository;
use crate::application::{config::ImportDictConfig, services::DictImporter};
use crate::infrastructure::json_file_dict_source::JsonFileDictSource;
use tracing::{debug, error, info};
pub async fn run<R: DictRepository>(
config: ImportDictConfig,
repository: R,
) -> Result<(), anyhow::Error> {
debug!("Importing dict with config {:?}", config);
info!(
"Starting import of dictionary '{}' from file '{}'",
config.name, config.path
);
// Create the JSON file source (will auto-generate IDs starting from 1)
let source = JsonFileDictSource::new(&config.path)?;
// Create the importer
let importer = DictImporter::new(&repository);
// Perform the import (this will call create() first)
match importer.import(&config.name, source) {
Ok(()) => {
info!("Successfully imported dictionary '{}'", config.name);
Ok(())
}
Err(e) => {
error!("Failed to import dictionary '{}': {}", config.name, e);
Err(e)
}
}
}

7
lib/src/presentation/cli/defaults.rs

@ -12,6 +12,8 @@ pub const HELP_PORT: &str = formatcp!("Override Port [default: {}]", PORT);
pub const HELP_LOG: &str = formatcp!("Override Log Level [default: {}]", LOG_LEVEL);
pub const HELP_ENC_SYSTEM: &str = formatcp!("System to use [default: {}]", SYSTEM_NAME);
pub const HELP_ENC_INPUT: &str = formatcp!("Text to encode");
pub const HELP_IMPORT_DICT_NAME: &str = formatcp!("Dictionary name");
pub const HELP_IMPORT_DICT_INPUT: &str = formatcp!("Dictionary file path");
pub fn set_defaults(
builder: ConfigBuilder<config::builder::DefaultState>,
@ -21,8 +23,9 @@ pub fn set_defaults(
// Server
.set_default("server.host", HOST)?
.set_default("server.port", PORT)?
// Encode
.set_default("encode.system", SYSTEM_NAME)
// Encoder
.set_default("encoder.system", SYSTEM_NAME)?
.set_default("encoder.input", "")
// Wrapping in Result
.map_err(|e| e.into())
}

Loading…
Cancel
Save