diff --git a/nucliadb/nucliadb/common/cluster/grpc_node_dummy.py b/nucliadb/nucliadb/common/cluster/grpc_node_dummy.py index f599a961be..796444e0c1 100644 --- a/nucliadb/nucliadb/common/cluster/grpc_node_dummy.py +++ b/nucliadb/nucliadb/common/cluster/grpc_node_dummy.py @@ -70,11 +70,11 @@ async def AddVectorSet(self, data): # pragma: no cover result = OpStatus() return result - async def ListVectorSet(self, data: ShardId): # pragma: no cover - self.calls.setdefault("ListVectorSet", []).append(data) + async def ListVectorSets(self, data: ShardId): # pragma: no cover + self.calls.setdefault("ListVectorSets", []).append(data) result = VectorSetList() result.shard.id = data.id - result.vectorset.append("base") + result.vectorsets.append("base") return result async def GC(self, request: ShardId) -> EmptyResponse: # pragma: no cover diff --git a/nucliadb_node/src/grpc/grpc_writer.rs b/nucliadb_node/src/grpc/grpc_writer.rs index 157e444d29..685a33b55d 100644 --- a/nucliadb_node/src/grpc/grpc_writer.rs +++ b/nucliadb_node/src/grpc/grpc_writer.rs @@ -26,7 +26,7 @@ use crate::grpc::collect_garbage::{garbage_collection_loop, GCParameters}; use crate::merge::{global_merger, MergePriority, MergeRequest, MergeWaiter}; use crate::settings::Settings; use crate::shards::metadata::ShardMetadata; -use crate::shards::writer::ShardWriter; +use crate::shards::writer::{NewVectorsIndex, ShardWriter}; use crate::telemetry::run_with_telemetry; use crate::utils::{get_primary_node_id, list_shards, read_host_key}; use nucliadb_core::metrics::get_metrics; @@ -38,7 +38,7 @@ use nucliadb_core::protos::{ ShardId, ShardIds, VectorSetId, VectorSetList, }; use nucliadb_core::tracing::{self, Span, *}; -use nucliadb_core::Channel; +use nucliadb_core::{Channel, NodeResult}; use object_store::path::Path; use std::sync::Arc; use std::time::Duration; @@ -280,16 +280,123 @@ impl NodeWriter for NodeWriterGRPCDriver { Ok(tonic::Response::new(status)) } - async fn add_vector_set(&self, _: Request) -> Result, Status> { - Err(tonic::Status::internal("Coming soon..")) + async fn add_vector_set(&self, request: Request) -> Result, Status> { + let span = Span::current(); + + let request = request.into_inner(); + let similarity = request.similarity(); + let normalize_vectors = request.normalize_vectors; + let Some(VectorSetId { + shard: Some(ShardId { + id: shard_id, + }), + vectorset, + }) = request.id + else { + return Ok(tonic::Response::new(OpStatus { + status: op_status::Status::Error.into(), + detail: "Vectorset ID must be provided".to_string(), + ..Default::default() + })); + }; + + let shards = Arc::clone(&self.shards); + let task = move || { + run_with_telemetry(info_span!(parent: &span, "Add a vectorset"), move || { + let shard = obtain_shard(shards, shard_id.clone())?; + shard.create_vectors_index(NewVectorsIndex { + shard_id, + name: vectorset, + channel: shard.metadata.channel(), + similarity, + normalize_vectors, + }) + }) + }; + let result = tokio::task::spawn_blocking(task) + .await + .map_err(|error| tonic::Status::internal(format!("Blocking task panicked: {error:?}")))?; + let status = match result { + Ok(()) => OpStatus { + status: op_status::Status::Ok.into(), + detail: "Vectorset successfully created".to_string(), + ..Default::default() + }, + Err(error) => OpStatus { + status: op_status::Status::Error.into(), + detail: error.to_string(), + ..Default::default() + }, + }; + Ok(tonic::Response::new(status)) } - async fn remove_vector_set(&self, _: Request) -> Result, Status> { - Err(tonic::Status::internal("Coming 
soon..")) + async fn remove_vector_set(&self, request: Request) -> Result, Status> { + let span = Span::current(); + + let VectorSetId { + shard: Some(ShardId { + id: shard_id, + }), + vectorset, + } = request.into_inner() + else { + return Ok(tonic::Response::new(OpStatus { + status: op_status::Status::Error.into(), + detail: "Vectorset ID must be provided".to_string(), + ..Default::default() + })); + }; + + let shards = Arc::clone(&self.shards); + let task = move || { + run_with_telemetry(info_span!(parent: &span, "Remove vectorset"), move || { + let shard = obtain_shard(shards, shard_id.clone())?; + shard.remove_vectors_index(vectorset) + }) + }; + let result = tokio::task::spawn_blocking(task) + .await + .map_err(|error| tonic::Status::internal(format!("Blocking task panicked: {error:?}")))?; + let status = match result { + Ok(()) => OpStatus { + status: op_status::Status::Ok.into(), + detail: "Vectorset successfully deleted".to_string(), + ..Default::default() + }, + Err(error) => OpStatus { + status: op_status::Status::Error.into(), + detail: error.to_string(), + ..Default::default() + }, + }; + Ok(tonic::Response::new(status)) } - async fn list_vector_sets(&self, _: Request) -> Result, Status> { - Err(tonic::Status::internal("Coming soon..")) + async fn list_vector_sets(&self, request: Request) -> Result, Status> { + let span = Span::current(); + + let shard_id = request.into_inner().id; + let shard_id_clone = shard_id.clone(); + let shards = Arc::clone(&self.shards); + let task = move || { + run_with_telemetry(info_span!(parent: &span, "Remove vectorset"), move || { + let shard = obtain_shard(shards, shard_id_clone)?; + Ok(shard.list_vectors_indexes()) + }) + }; + let result: NodeResult> = tokio::task::spawn_blocking(task) + .await + .map_err(|error| tonic::Status::internal(format!("Blocking task panicked: {error:?}")))?; + match result { + Ok(vectorsets) => Ok(tonic::Response::new(VectorSetList { + shard: Some(ShardId { + id: shard_id, + }), + vectorsets, + })), + Err(error) => Err(tonic::Status::internal(error.to_string())), + } } async fn get_metadata(&self, _request: Request) -> Result, Status> { diff --git a/nucliadb_node/src/shards/indexes.rs b/nucliadb_node/src/shards/indexes.rs index 816f0dcd68..c1d577a7b0 100644 --- a/nucliadb_node/src/shards/indexes.rs +++ b/nucliadb_node/src/shards/indexes.rs @@ -29,7 +29,7 @@ use uuid::Uuid; use crate::disk_structure; -pub const DEFAULT_VECTOR_INDEX_NAME: &str = "__default__"; +pub const DEFAULT_VECTORS_INDEX_NAME: &str = "__default__"; pub const MAX_ALLOWED_VECTORSETS: usize = 5; pub const SHARD_INDEXES_FILENAME: &str = "indexes.json"; pub const TEMP_SHARD_INDEXES_FILENAME: &str = "indexes.temp.json"; @@ -70,7 +70,7 @@ impl ShardIndexes { } pub fn vectors_path(&self) -> PathBuf { - self.vectorset_path(DEFAULT_VECTOR_INDEX_NAME).expect("Default vectors index should always be present") + self.vectorset_path(DEFAULT_VECTORS_INDEX_NAME).expect("Default vectors index should always be present") } pub fn vectorset_path(&self, name: &str) -> Option { @@ -83,17 +83,16 @@ impl ShardIndexes { // Vectorsets - #[allow(dead_code)] - /// Add a new vectorset to the index and returns it's path - pub fn add_vectorset(&mut self, name: String) -> NodeResult { + /// Add a new vectors index to the shard and returns it's path + pub fn add_vectors_index(&mut self, name: String) -> NodeResult { if self.inner.vectorsets.len() >= MAX_ALLOWED_VECTORSETS { return Err(node_error!(format!( "Max amount of allowed vectorsets reached: {}", self.inner.vectorsets.len() 
))); } - if name == DEFAULT_VECTOR_INDEX_NAME { - return Err(node_error!(format!("Vectorset id {DEFAULT_VECTOR_INDEX_NAME} is reserved for internal use"))); + if name == DEFAULT_VECTORS_INDEX_NAME { + return Err(node_error!(format!("Vectorset id {DEFAULT_VECTORS_INDEX_NAME} is reserved for internal use"))); } if self.inner.vectorsets.contains_key(&name) { return Err(node_error!(format!("Vectorset id {name} is already in use"))); @@ -105,22 +104,24 @@ impl ShardIndexes { Ok(path) } - #[allow(dead_code)] - /// Removes a vectorset from the shard and returns the index path - pub fn remove_vectorset(&mut self, name: &str) -> NodeResult> { - if name == DEFAULT_VECTOR_INDEX_NAME { + /// Removes a vectors index from the shard and returns its path + pub fn remove_vectors_index(&mut self, name: &str) -> NodeResult> { + if name == DEFAULT_VECTORS_INDEX_NAME { return Err(node_error!(format!( - "Vectorset id {DEFAULT_VECTOR_INDEX_NAME} is reserved and can't be removed" + "Vectorset id {DEFAULT_VECTORS_INDEX_NAME} is reserved and can't be removed" ))); } let removed = self.inner.vectorsets.remove(name).map(|vectorset| self.shard_path.join(vectorset)); Ok(removed) } - #[allow(dead_code)] - pub fn iter_vectorsets(&self) -> impl Iterator + '_ { + pub fn iter_vectors_indexes(&self) -> impl Iterator + '_ { self.inner.vectorsets.iter().map(|(name, vectorset)| (name.to_owned(), self.shard_path.join(vectorset))) } + + pub fn count_vectors_indexes(&self) -> usize { + self.inner.vectorsets.len() + } } #[cfg_attr(test, derive(PartialEq))] @@ -135,7 +136,7 @@ struct ShardIndexesFile { impl ShardIndexesFile { pub fn load(shard_path: &Path) -> NodeResult { let mut reader = BufReader::new(File::open(shard_path.join(SHARD_INDEXES_FILENAME))?); - let indexes: ShardIndexesFile = serde_json::from_reader(&mut reader)?; + let indexes: Self = serde_json::from_reader(&mut reader)?; Ok(indexes) } @@ -155,7 +156,7 @@ impl Default for ShardIndexesFile { Self { texts: disk_structure::TEXTS_DIR.into(), paragraphs: disk_structure::PARAGRAPHS_DIR.into(), - vectorsets: HashMap::from([(DEFAULT_VECTOR_INDEX_NAME.to_string(), disk_structure::VECTORS_DIR.into())]), + vectorsets: HashMap::from([(DEFAULT_VECTORS_INDEX_NAME.to_string(), disk_structure::VECTORS_DIR.into())]), relations: disk_structure::RELATIONS_DIR.into(), } } @@ -191,18 +192,18 @@ mod tests { let mut indexes = ShardIndexes::new(shard_path); - let vectorsets = indexes.iter_vectorsets().collect::>(); + let vectorsets = indexes.iter_vectors_indexes().collect::>(); assert_eq!(vectorsets.len(), 1); - assert_eq!(vectorsets[0].0, DEFAULT_VECTOR_INDEX_NAME.to_string()); + assert_eq!(vectorsets[0].0, DEFAULT_VECTORS_INDEX_NAME.to_string()); assert_eq!(vectorsets[0].1, shard_path.join(disk_structure::VECTORS_DIR)); assert_eq!( - indexes.vectorset_path(DEFAULT_VECTOR_INDEX_NAME), + indexes.vectorset_path(DEFAULT_VECTORS_INDEX_NAME), Some(shard_path.join(disk_structure::VECTORS_DIR)) ); // Default vectorset can't be removed - assert!(indexes.remove_vectorset(DEFAULT_VECTOR_INDEX_NAME).is_err()); + assert!(indexes.remove_vectors_index(DEFAULT_VECTORS_INDEX_NAME).is_err()); } #[test] @@ -212,7 +213,7 @@ mod tests { let mut indexes = ShardIndexes::new(shard_path); - indexes.add_vectorset("gecko".to_string()).unwrap(); + indexes.add_vectors_index("gecko".to_string()).unwrap(); assert_eq!(indexes.texts_path(), shard_path.join(disk_structure::TEXTS_DIR)); assert_eq!(indexes.paragraphs_path(), shard_path.join(disk_structure::PARAGRAPHS_DIR)); @@ -233,13 +234,13 @@ mod tests { let mut indexes 
= ShardIndexes::new(shard_path); - indexes.add_vectorset("gecko".to_string()).unwrap(); - indexes.add_vectorset("openai".to_string()).unwrap(); + indexes.add_vectors_index("gecko".to_string()).unwrap(); + indexes.add_vectors_index("openai".to_string()).unwrap(); - let vectorsets = indexes.iter_vectorsets().sorted().collect::>(); + let vectorsets = indexes.iter_vectors_indexes().sorted().collect::>(); assert_eq!(vectorsets.len(), 3); - assert_eq!(vectorsets[0].0, DEFAULT_VECTOR_INDEX_NAME.to_string()); + assert_eq!(vectorsets[0].0, DEFAULT_VECTORS_INDEX_NAME.to_string()); assert_eq!(vectorsets[1].0, "gecko".to_string()); assert_eq!(vectorsets[1].1, indexes.vectorset_path("gecko").unwrap()); assert_eq!(vectorsets[2].0, "openai".to_string()); @@ -255,15 +256,15 @@ mod tests { // Add two vectorsets more - let added = indexes.add_vectorset("gecko".to_string()).is_ok(); + let added = indexes.add_vectors_index("gecko".to_string()).is_ok(); assert!(added); - let added = indexes.add_vectorset("openai".to_string()).is_ok(); + let added = indexes.add_vectors_index("openai".to_string()).is_ok(); assert!(added); - let vectorsets = indexes.iter_vectorsets().sorted().collect::>(); + let vectorsets = indexes.iter_vectors_indexes().sorted().collect::>(); assert_eq!(vectorsets.len(), 3); - assert_eq!(vectorsets[0].0, DEFAULT_VECTOR_INDEX_NAME.to_string()); + assert_eq!(vectorsets[0].0, DEFAULT_VECTORS_INDEX_NAME.to_string()); assert_eq!(vectorsets[1].0, "gecko".to_string()); assert_eq!(vectorsets[1].1, indexes.vectorset_path("gecko").unwrap()); assert_eq!(vectorsets[2].0, "openai".to_string()); @@ -271,11 +272,11 @@ mod tests { // Remove a regular vectorset - assert!(indexes.remove_vectorset("gecko").is_ok()); + assert!(indexes.remove_vectors_index("gecko").is_ok()); - let vectorsets = indexes.iter_vectorsets().sorted().collect::>(); + let vectorsets = indexes.iter_vectors_indexes().sorted().collect::>(); assert_eq!(vectorsets.len(), 2); - assert_eq!(vectorsets[0].0, DEFAULT_VECTOR_INDEX_NAME.to_string()); + assert_eq!(vectorsets[0].0, DEFAULT_VECTORS_INDEX_NAME.to_string()); assert_eq!(vectorsets[1].0, "openai".to_string()); assert_eq!(vectorsets[1].1, indexes.vectorset_path("openai").unwrap()); } @@ -289,8 +290,8 @@ mod tests { // Add two vectorsets more - assert!(indexes.add_vectorset("gecko".to_string()).is_ok()); - assert!(indexes.add_vectorset("gecko".to_string()).is_err()); + assert!(indexes.add_vectors_index("gecko".to_string()).is_ok()); + assert!(indexes.add_vectors_index("gecko".to_string()).is_err()); } #[test] @@ -301,8 +302,8 @@ mod tests { let mut indexes = ShardIndexes::new(shard_path); for i in 0..(MAX_ALLOWED_VECTORSETS - 1) { - assert!(indexes.add_vectorset(format!("vectorset-{i}")).is_ok()); + assert!(indexes.add_vectors_index(format!("vectorset-{i}")).is_ok()); } - assert!(indexes.add_vectorset("too-many".to_string()).is_err()); + assert!(indexes.add_vectors_index("too-many".to_string()).is_err()); } } diff --git a/nucliadb_node/src/shards/mod.rs b/nucliadb_node/src/shards/mod.rs index aeb99f219e..3a2eec6277 100644 --- a/nucliadb_node/src/shards/mod.rs +++ b/nucliadb_node/src/shards/mod.rs @@ -20,7 +20,7 @@ //! 
This module provides tools for managing shards // pub mod errors; -mod indexes; +pub mod indexes; pub mod metadata; pub mod shard_reader; pub mod shard_writer; diff --git a/nucliadb_node/src/shards/shard_reader.rs b/nucliadb_node/src/shards/shard_reader.rs index ecb9f8f273..4aa44f5a54 100644 --- a/nucliadb_node/src/shards/shard_reader.rs +++ b/nucliadb_node/src/shards/shard_reader.rs @@ -17,6 +17,7 @@ // along with this program. If not, see . use super::indexes::ShardIndexes; +use super::indexes::DEFAULT_VECTORS_INDEX_NAME; use super::metadata::ShardMetadata; use super::versioning::Versions; use crate::disk_structure::*; @@ -46,6 +47,7 @@ use nucliadb_procs::measure; use nucliadb_protos::nodereader::{RelationNodeFilter, RelationPrefixSearchResponse}; use nucliadb_protos::utils::relation_node::NodeType; use nucliadb_relations2::reader::HashedRelationNode; +use std::collections::HashMap; use std::collections::HashSet; use std::fs::{self, File}; use std::io::{BufReader, Read}; @@ -137,12 +139,13 @@ impl Iterator for ShardFileChunkIterator { pub struct ShardReader { pub id: String, pub metadata: ShardMetadata, - indexes: ShardIndexes, root_path: PathBuf, suffixed_root_path: String, text_reader: RwLock, paragraph_reader: RwLock, - vector_reader: RwLock, + // vector index searches are not intended to run in parallel, so we only + // need a lock for all of them + vector_readers: RwLock>, relation_reader: RwLock, versions: Versions, } @@ -198,7 +201,14 @@ impl ShardReader { let info = info_span!(parent: &span, "paragraph count"); let paragraph_task = || run_with_telemetry(info, || read_rw_lock(&self.paragraph_reader).count()); let info = info_span!(parent: &span, "vector count"); - let vector_task = || run_with_telemetry(info, || read_rw_lock(&self.vector_reader).count()); + let vector_task = || { + run_with_telemetry(info, || { + read_rw_lock(&self.vector_readers) + .get(DEFAULT_VECTORS_INDEX_NAME) + .expect("Default vectors index should never be deleted (yet)") + .count() + }) + }; let mut text_result = Ok(0); let mut paragraph_result = Ok(0); @@ -236,7 +246,10 @@ impl ShardReader { #[tracing::instrument(skip_all)] pub fn get_vectors_keys(&self) -> NodeResult> { - read_rw_lock(&self.vector_reader).stored_ids() + read_rw_lock(&self.vector_readers) + .get(DEFAULT_VECTORS_INDEX_NAME) + .expect("Default vectors index should never be deleted (yet)") + .stored_ids() } #[tracing::instrument(skip_all)] @@ -293,11 +306,10 @@ impl ShardReader { id, metadata, suffixed_root_path, - indexes, root_path: shard_path.to_path_buf(), text_reader: RwLock::new(fields.unwrap()), paragraph_reader: RwLock::new(paragraphs.unwrap()), - vector_reader: RwLock::new(vectors.unwrap()), + vector_readers: RwLock::new(HashMap::from([(DEFAULT_VECTORS_INDEX_NAME.to_string(), vectors.unwrap())])), relation_reader: RwLock::new(relations.unwrap()), versions, }) @@ -473,9 +485,8 @@ impl ShardReader { let vector_task = index_queries.vectors_request.map(|mut request| { request.id = search_id.clone(); let vectors_context = &index_queries.vectors_context; - let info = info_span!(parent: &span, "vector search"); - let task = move || read_rw_lock(&self.vector_reader).search(&request, vectors_context); - || run_with_telemetry(info, task) + let task = move || self.vectors_index_search(&request, vectors_context); + || run_with_telemetry(info_span!(parent: &span, "vector search"), task) }); let relation_task = index_queries.relations_request.map(|request| { @@ -597,7 +608,7 @@ impl ShardReader { let span = tracing::Span::current(); 
run_with_telemetry(info_span!(parent: &span, "vector reader search"), || { - read_rw_lock(&self.vector_reader).search(&search_request, &VectorsContext::default()) + self.vectors_index_search(&search_request, &VectorsContext::default()) }) } #[tracing::instrument(skip_all)] @@ -616,7 +627,10 @@ impl ShardReader { #[tracing::instrument(skip_all)] pub fn vector_count(&self) -> NodeResult { - read_rw_lock(&self.vector_reader).count() + read_rw_lock(&self.vector_readers) + .get(DEFAULT_VECTORS_INDEX_NAME) + .expect("Default vectors index should never be deleted (yet)") + .count() } #[tracing::instrument(skip_all)] @@ -625,13 +639,42 @@ impl ShardReader { } pub fn update(&self) -> NodeResult<()> { - let version = self.versions.vectors; - let path = self.indexes.vectors_path(); - let new_reader = open_vectors_reader(version, &path)?; - let mut writer = write_rw_lock(&self.vector_reader); - *writer = new_reader; + let shard_path = self.metadata.shard_path(); + // TODO: while we don't have all shards migrated, we still have to + // unwrap with a default + let indexes = ShardIndexes::load(&shard_path).unwrap_or_else(|_| ShardIndexes::new(&shard_path)); + + let mut updated_indexes = HashMap::with_capacity(indexes.count_vectors_indexes()); + for (vectorset, path) in indexes.iter_vectors_indexes() { + let new_reader = open_vectors_reader(self.versions.vectors, &path)?; + updated_indexes.insert(vectorset, new_reader); + } + let mut vector_indexes = write_rw_lock(&self.vector_readers); + *vector_indexes = updated_indexes; Ok(()) } + + fn vectors_index_search( + &self, + request: &VectorSearchRequest, + context: &VectorsContext, + ) -> NodeResult { + let vectorset = &request.vector_set; + if vectorset.is_empty() { + read_rw_lock(&self.vector_readers) + .get(DEFAULT_VECTORS_INDEX_NAME) + .expect("Default vectors index should never be deleted (yet)") + .search(request, context) + } else { + let vector_readers = read_rw_lock(&self.vector_readers); + let reader = vector_readers.get(vectorset); + if let Some(reader) = reader { + reader.search(request, context) + } else { + Err(node_error!("Vectorset '{vectorset}' not found")) + } + } + } } #[cfg(test)] diff --git a/nucliadb_node/src/shards/shard_writer.rs b/nucliadb_node/src/shards/shard_writer.rs index f1b3bd24ff..46f388c5d4 100644 --- a/nucliadb_node/src/shards/shard_writer.rs +++ b/nucliadb_node/src/shards/shard_writer.rs @@ -20,7 +20,6 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; -use nucliadb_core::paragraphs::*; use nucliadb_core::prelude::*; use nucliadb_core::protos::shard_created::{DocumentService, ParagraphService, RelationService, VectorService}; use nucliadb_core::protos::{Resource, ResourceId}; @@ -28,11 +27,13 @@ use nucliadb_core::relations::*; use nucliadb_core::texts::*; use nucliadb_core::tracing::{self, *}; use nucliadb_core::vectors::*; +use nucliadb_core::{paragraphs::*, Channel}; use nucliadb_core::{thread, IndexFiles}; use nucliadb_procs::measure; +use nucliadb_protos::utils::VectorSimilarity; use nucliadb_vectors::VectorErr; -use super::indexes::ShardIndexes; +use super::indexes::{ShardIndexes, DEFAULT_VECTORS_INDEX_NAME}; use super::metadata::ShardMetadata; use super::versioning::{self, Versions}; use crate::disk_structure::*; @@ -100,17 +101,17 @@ pub struct ShardWriter { pub metadata: Arc, pub id: String, pub path: PathBuf, - indexes: RwLock, + indexes: RwLock, versions: Versions, pub gc_lock: tokio::sync::Mutex<()>, // lock to be able to do GC or not } #[derive(Debug)] -struct 
InnerShardWriter { - text_writer: TextsWriterPointer, - paragraph_writer: ParagraphsWriterPointer, - vector_writer: VectorsWriterPointer, - relation_writer: RelationsWriterPointer, +struct ShardWriterIndexes { + texts_index: TextsWriterPointer, + paragraphs_index: ParagraphsWriterPointer, + vectors_indexes: HashMap, + relations_index: RelationsWriterPointer, } impl ShardWriter { @@ -223,11 +224,14 @@ impl ShardWriter { id: metadata.id(), path: metadata.shard_path(), metadata, - indexes: RwLock::new(InnerShardWriter { - text_writer: Box::new(fields.unwrap()), - paragraph_writer: Box::new(paragraphs.unwrap()), - vector_writer: Box::new(vectors.unwrap()), - relation_writer: Box::new(relations.unwrap()), + indexes: RwLock::new(ShardWriterIndexes { + texts_index: Box::new(fields.unwrap()), + paragraphs_index: Box::new(paragraphs.unwrap()), + vectors_indexes: HashMap::from([( + DEFAULT_VECTORS_INDEX_NAME.to_string(), + Box::new(vectors.unwrap()) as VectorsWriterPointer, + )]), + relations_index: Box::new(relations.unwrap()), }), versions, gc_lock: tokio::sync::Mutex::new(()), @@ -236,74 +240,121 @@ impl ShardWriter { #[measure(actor = "shard", metric = "open")] pub fn open(metadata: Arc) -> NodeResult { + let span = tracing::Span::current(); let shard_path = metadata.shard_path(); + + // fallback to default indexes while there are shards without the file let indexes = ShardIndexes::load(&shard_path).unwrap_or_else(|_| ShardIndexes::new(&shard_path)); - // TODO: this call will generate the shard indexes file, as a lazy - // migration. When every shard has the file, this line should be - // removed + // This call will generate the shard indexes file, as a lazy migration. + // TODO: When every shard has the file, this line should be removed indexes.store()?; + let versions_path = metadata.shard_path().join(VERSION_FILE); + let versions = Versions::load(&versions_path)?; + let tsc = TextConfig { path: indexes.texts_path(), }; + let text_task = || Some(open_texts_writer(versions.texts, &tsc)); + let info = info_span!(parent: &span, "Open texts index writer"); + let text_task = || run_with_telemetry(info, text_task); + let psc = ParagraphConfig { path: indexes.paragraphs_path(), }; + let paragraph_task = || Some(open_paragraphs_writer(versions.paragraphs, &psc)); + let info = info_span!(parent: &span, "Open paragraphs index writer"); + let paragraph_task = || run_with_telemetry(info, paragraph_task); + + let mut vector_tasks = vec![]; + for (name, path) in indexes.iter_vectors_indexes() { + let id = metadata.id(); + vector_tasks.push(move || Some((name, open_vectors_writer(versions.vectors, &path, id)))); + } + let rsc = RelationConfig { path: indexes.relations_path(), channel: metadata.channel(), }; - - let versions_path = metadata.shard_path().join(VERSION_FILE); - let versions = Versions::load(&versions_path)?; - - let text_task = || Some(open_texts_writer(versions.texts, &tsc)); - let paragraph_task = || Some(open_paragraphs_writer(versions.paragraphs, &psc)); - let vector_task = || Some(open_vectors_writer(versions.vectors, &indexes.vectors_path(), metadata.id())); + let info = info_span!(parent: &span, "Open relations index writer"); let relation_task = || Some(open_relations_writer(versions.relations, &rsc)); - - let span = tracing::Span::current(); - let info = info_span!(parent: &span, "text start"); - let text_task = || run_with_telemetry(info, text_task); - let info = info_span!(parent: &span, "paragraph start"); - let paragraph_task = || run_with_telemetry(info, paragraph_task); - let info 
= info_span!(parent: &span, "vector start"); - let vector_task = || run_with_telemetry(info, vector_task); - let info = info_span!(parent: &span, "relation start"); let relation_task = || run_with_telemetry(info, relation_task); let mut text_result = None; let mut paragraph_result = None; - let mut vector_result = None; + let mut vector_results = Vec::with_capacity(vector_tasks.len()); + for _ in 0..vector_tasks.len() { + vector_results.push(None); + } let mut relation_result = None; thread::scope(|s| { s.spawn(|_| text_result = text_task()); s.spawn(|_| paragraph_result = paragraph_task()); - s.spawn(|_| vector_result = vector_task()); + for (vector_task, vector_result) in vector_tasks.into_iter().zip(vector_results.iter_mut()) { + s.spawn(|_| *vector_result = vector_task()); + } s.spawn(|_| relation_result = relation_task()); }); - let fields = text_result.transpose()?; - let paragraphs = paragraph_result.transpose()?; - let vectors = vector_result.transpose()?; - let relations = relation_result.transpose()?; + let texts = text_result.unwrap()?; + let paragraphs = paragraph_result.unwrap()?; + let mut vectors = HashMap::with_capacity(vector_results.len()); + for result in vector_results { + let (name, vector_writer) = result.unwrap(); + vectors.insert(name, vector_writer?); + } + let relations = relation_result.unwrap()?; Ok(ShardWriter { id: metadata.id(), path: metadata.shard_path(), metadata, - indexes: RwLock::new(InnerShardWriter { - text_writer: fields.unwrap(), - paragraph_writer: paragraphs.unwrap(), - vector_writer: vectors.unwrap(), - relation_writer: relations.unwrap(), + indexes: RwLock::new(ShardWriterIndexes { + texts_index: texts, + paragraphs_index: paragraphs, + vectors_indexes: vectors, + relations_index: relations, }), versions, gc_lock: tokio::sync::Mutex::new(()), }) } + pub fn create_vectors_index(&self, new: NewVectorsIndex) -> NodeResult<()> { + let mut indexes = ShardIndexes::load(&self.metadata.shard_path())?; + let path = indexes.add_vectors_index(new.name.clone())?; + let vectors_writer = nucliadb_vectors::service::VectorWriterService::create(VectorConfig { + path, + shard_id: new.shard_id, + channel: new.channel, + similarity: new.similarity, + normalize_vectors: new.normalize_vectors, + })?; + indexes.store()?; + write_rw_lock(&self.indexes).vectors_indexes.insert(new.name, Box::new(vectors_writer)); + Ok(()) + } + + pub fn remove_vectors_index(&self, name: String) -> NodeResult<()> { + let mut indexes = ShardIndexes::load(&self.metadata.shard_path())?; + let path = indexes.remove_vectors_index(&name)?; + indexes.store()?; + write_rw_lock(&self.indexes).vectors_indexes.remove(&name); + if let Some(path) = path { + // Although there can be a reader with this index open, readers + // currently open all vectors index files so we rely on Linux not + // deleting the files until it closes the index. 
Readers then should + // be able to keep answering for that vectorset until closing it + std::fs::remove_dir_all(path)?; + } + Ok(()) + } + + pub fn list_vectors_indexes(&self) -> Vec { + read_rw_lock(&self.indexes).vectors_indexes.keys().cloned().collect::>() + } + #[measure(actor = "shard", metric = "set_resource")] #[tracing::instrument(skip_all)] pub fn set_resource(&self, mut resource: Resource) -> NodeResult<()> { @@ -311,60 +362,69 @@ impl ShardWriter { remove_invalid_labels(&mut resource); - let indexes: &mut InnerShardWriter = &mut write_rw_lock(&self.indexes); + let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes); - let text_task = || { - debug!("Field service starts set_resource"); - let result = indexes.text_writer.set_resource(&resource); - debug!("Field service ends set_resource"); - result + let mut text_task = || { + run_with_telemetry(info_span!(parent: &span, "text set_resource"), || { + debug!("Field service starts set_resource"); + let result = indexes.texts_index.set_resource(&resource); + debug!("Field service ends set_resource"); + result + }) }; - let paragraph_task = || { - debug!("Paragraph service starts set_resource"); - let result = indexes.paragraph_writer.set_resource(&resource); - debug!("Paragraph service ends set_resource"); - result + let mut paragraph_task = || { + run_with_telemetry(info_span!(parent: &span, "paragraph set_resource"), || { + debug!("Paragraph service starts set_resource"); + let result = indexes.paragraphs_index.set_resource(&resource); + debug!("Paragraph service ends set_resource"); + result + }) }; - let vector_task = || { - debug!("Vector service starts set_resource"); - let result = indexes.vector_writer.set_resource(&resource); - debug!("Vector service ends set_resource"); - result - }; + let mut vector_tasks = vec![]; + for (_, vector_writer) in indexes.vectors_indexes.iter_mut() { + vector_tasks.push(|| { + run_with_telemetry(info_span!(parent: &span, "vector set_resource"), || { + debug!("Vector service starts set_resource"); + let result = vector_writer.set_resource(&resource); + debug!("Vector service ends set_resource"); + result + }) + }); + } - let relation_task = || { - debug!("Relation service starts set_resource"); - let result = indexes.relation_writer.set_resource(&resource); - debug!("Relation service ends set_resource"); - result + let mut relation_task = || { + run_with_telemetry(info_span!(parent: &span, "relation set_resource"), || { + debug!("Relation service starts set_resource"); + let result = indexes.relations_index.set_resource(&resource); + debug!("Relation service ends set_resource"); + result + }) }; - let info = info_span!(parent: &span, "text set_resource"); - let text_task = || run_with_telemetry(info, text_task); - let info = info_span!(parent: &span, "paragraph set_resource"); - let paragraph_task = || run_with_telemetry(info, paragraph_task); - let info = info_span!(parent: &span, "vector set_resource"); - let vector_task = || run_with_telemetry(info, vector_task); - let info = info_span!(parent: &span, "relation set_resource"); - let relation_task = || run_with_telemetry(info, relation_task); - let mut text_result = Ok(()); let mut paragraph_result = Ok(()); - let mut vector_result = Ok(()); + let mut vector_results = Vec::with_capacity(vector_tasks.len()); + for _ in 0..vector_tasks.len() { + vector_results.push(Ok(())); + } let mut relation_result = Ok(()); thread::scope(|s| { s.spawn(|_| text_result = text_task()); s.spawn(|_| paragraph_result = paragraph_task()); - 
s.spawn(|_| vector_result = vector_task()); + for (mut vector_task, vector_result) in vector_tasks.into_iter().zip(vector_results.iter_mut()) { + s.spawn(move |_| *vector_result = vector_task()); + } s.spawn(|_| relation_result = relation_task()); }); text_result?; paragraph_result?; - vector_result?; + for result in vector_results { + result? + } relation_result?; self.metadata.new_generation_id(); // VERY NAIVE, SHOULD BE DONE AFTER MERGE AS WELL @@ -376,40 +436,57 @@ impl ShardWriter { pub fn remove_resource(&self, resource: &ResourceId) -> NodeResult<()> { let span = tracing::Span::current(); - let indexes: &mut InnerShardWriter = &mut write_rw_lock(&self.indexes); - - let text_task = || indexes.text_writer.delete_resource(resource); + let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes); - let paragraph_task = || indexes.paragraph_writer.delete_resource(resource); + let mut text_task = || { + run_with_telemetry(info_span!(parent: &span, "text remove"), || { + indexes.texts_index.delete_resource(resource) + }) + }; - let vector_task = || indexes.vector_writer.delete_resource(resource); + let mut paragraph_task = || { + run_with_telemetry(info_span!(parent: &span, "paragraph remove"), || { + indexes.paragraphs_index.delete_resource(resource) + }) + }; - let relation_task = || indexes.relation_writer.delete_resource(resource); + let mut vector_tasks = vec![]; + for (_, vector_writer) in indexes.vectors_indexes.iter_mut() { + vector_tasks.push(|| { + run_with_telemetry(info_span!(parent: &span, "vector remove"), || { + vector_writer.delete_resource(resource) + }) + }); + } - let info = info_span!(parent: &span, "text remove"); - let text_task = || run_with_telemetry(info, text_task); - let info = info_span!(parent: &span, "paragraph remove"); - let paragraph_task = || run_with_telemetry(info, paragraph_task); - let info = info_span!(parent: &span, "vector remove"); - let vector_task = || run_with_telemetry(info, vector_task); - let info = info_span!(parent: &span, "relation remove"); - let relation_task = || run_with_telemetry(info, relation_task); + let mut relation_task = || { + run_with_telemetry(info_span!(parent: &span, "relation remove"), || { + indexes.relations_index.delete_resource(resource) + }) + }; let mut text_result = Ok(()); let mut paragraph_result = Ok(()); - let mut vector_result = Ok(()); + let mut vector_results = Vec::with_capacity(vector_tasks.len()); + for _ in 0..vector_tasks.len() { + vector_results.push(Ok(())); + } let mut relation_result = Ok(()); thread::scope(|s| { s.spawn(|_| text_result = text_task()); s.spawn(|_| paragraph_result = paragraph_task()); - s.spawn(|_| vector_result = vector_task()); + for (mut vector_task, vector_result) in vector_tasks.into_iter().zip(vector_results.iter_mut()) { + s.spawn(move |_| *vector_result = vector_task()); + } s.spawn(|_| relation_result = relation_task()); }); text_result?; paragraph_result?; - vector_result?; + for result in vector_results { + result? 
+        }
         relation_result?;
 
         self.metadata.new_generation_id();
@@ -420,59 +497,138 @@ impl ShardWriter {
     #[tracing::instrument(skip_all)]
     pub fn collect_garbage(&self) -> NodeResult<GarbageCollectorStatus> {
         let _lock = self.gc_lock.blocking_lock();
-        let result = write_rw_lock(&self.indexes).vector_writer.garbage_collection();
-        match result {
-            Ok(()) => Ok(GarbageCollectorStatus::GarbageCollected),
-            Err(error) => match error.downcast_ref::<VectorErr>() {
-                Some(VectorErr::WorkDelayed) => Ok(GarbageCollectorStatus::TryLater),
-                _ => Err(error),
-            },
+        let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes);
+
+        let mut gc_results = Vec::with_capacity(indexes.vectors_indexes.len());
+        for (_, vector_writer) in indexes.vectors_indexes.iter_mut() {
+            let result = vector_writer.garbage_collection();
+            gc_results.push(result);
         }
+
+        for result in gc_results {
+            if let Err(error) = result {
+                return match error.downcast_ref::<VectorErr>() {
+                    Some(VectorErr::WorkDelayed) => Ok(GarbageCollectorStatus::TryLater),
+                    _ => Err(error),
+                };
+            }
+        }
+        Ok(GarbageCollectorStatus::GarbageCollected)
     }
 
     #[tracing::instrument(skip_all)]
     pub fn force_garbage_collection(&self) -> NodeResult<GarbageCollectorStatus> {
         let _lock = self.gc_lock.blocking_lock();
-        let result = write_rw_lock(&self.indexes).vector_writer.force_garbage_collection();
-        match result {
-            Ok(()) => Ok(GarbageCollectorStatus::GarbageCollected),
-            Err(error) => match error.downcast_ref::<VectorErr>() {
-                Some(VectorErr::WorkDelayed) => Ok(GarbageCollectorStatus::TryLater),
-                _ => Err(error),
-            },
+        let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes);
+
+        let mut gc_results = Vec::with_capacity(indexes.vectors_indexes.len());
+        for (_, vector_writer) in indexes.vectors_indexes.iter_mut() {
+            let result = vector_writer.force_garbage_collection();
+            gc_results.push(result);
         }
+
+        for result in gc_results {
+            if let Err(error) = result {
+                return match error.downcast_ref::<VectorErr>() {
+                    Some(VectorErr::WorkDelayed) => Ok(GarbageCollectorStatus::TryLater),
+                    _ => Err(error),
+                };
+            }
+        }
+        Ok(GarbageCollectorStatus::GarbageCollected)
     }
 
     #[tracing::instrument(skip_all)]
     pub fn merge(&self, context: MergeContext) -> NodeResult<MergeMetrics> {
-        let runner = read_rw_lock(&self.indexes).vector_writer.prepare_merge(context.parameters)?;
-        let Some(mut runner) = runner else {
-            return Ok(MergeMetrics {
+        let mut runners = HashMap::new();
+        {
+            let indexes: &ShardWriterIndexes = &read_rw_lock(&self.indexes);
+            for (name, vectors_index) in indexes.vectors_indexes.iter() {
+                let runner = vectors_index.prepare_merge(context.parameters);
+                if let Ok(Some(runner)) = runner {
+                    runners.insert(name.clone(), runner);
+                }
+            }
+        }
+
+        // Running a merge is costly, so we don't want to hold the indexes lock while merging
+        let mut merge_results = HashMap::new();
+        for (name, mut runner) in runners.into_iter() {
+            let result = runner.run();
+            merge_results.insert(name, result);
+        }
+
+        {
+            let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes);
+
+            let mut metrics = MergeMetrics {
                 merged: 0,
                 left: 0,
-            });
-        };
-        let merge_result = runner.run()?;
-        let metrics = write_rw_lock(&self.indexes).vector_writer.record_merge(merge_result, context.source)?;
-        self.metadata.new_generation_id();
+            };
+            let mut prepare_merge_errors = Vec::new();
+            let mut record_merge_errors = Vec::new();
+            for (name, vectors_index) in indexes.vectors_indexes.iter_mut() {
+                let result = merge_results.remove(name);
+                if result.is_none() {
+                    // a new index may have been added while acquiring the indexes
+                    // lock for the second time; ignore it
+                    continue;
+                }
+                let result = result.unwrap();
+
+                if let Ok(merge_result) = result {
+                    let recorded = vectors_index.record_merge(merge_result, context.source);
+                    if let Ok(m) = recorded {
+                        metrics.merged += m.merged;
+                        metrics.left += m.left;
+                    } else {
+                        record_merge_errors.push(recorded);
+                    }
+                } else {
+                    prepare_merge_errors.push(result);
+                }
+            }
+
+            // return one of the errors we found, if any
+            for error in prepare_merge_errors {
+                error?;
+            }
+            for error in record_merge_errors {
+                error?;
+            }
 
-        Ok(metrics)
+            Ok(metrics)
+        }
     }
 
     /// This must be used only by replication and should be
     /// deleted as soon as possible.
     #[tracing::instrument(skip_all)]
     pub fn reload(&self) -> NodeResult<()> {
-        write_rw_lock(&self.indexes).vector_writer.reload()
+        let indexes: &mut ShardWriterIndexes = &mut write_rw_lock(&self.indexes);
+        let mut results = Vec::with_capacity(indexes.vectors_indexes.len());
+        for (_, vector_writer) in indexes.vectors_indexes.iter_mut() {
+            results.push(vector_writer.reload());
+        }
+        for result in results {
+            result?;
+        }
+        Ok(())
     }
 
     pub fn get_shard_segments(&self) -> NodeResult<HashMap<String, Vec<String>>> {
         let mut segments = HashMap::new();
-
-        segments.insert("paragraph".to_string(), read_rw_lock(&self.indexes).paragraph_writer.get_segment_ids()?);
-        segments.insert("text".to_string(), read_rw_lock(&self.indexes).text_writer.get_segment_ids()?);
-        segments.insert("vector".to_string(), read_rw_lock(&self.indexes).vector_writer.get_segment_ids()?);
-        segments.insert("relation".to_string(), read_rw_lock(&self.indexes).relation_writer.get_segment_ids()?);
+        let indexes: &ShardWriterIndexes = &read_rw_lock(&self.indexes);
+
+        segments.insert("paragraph".to_string(), indexes.paragraphs_index.get_segment_ids()?);
+        segments.insert("text".to_string(), indexes.texts_index.get_segment_ids()?);
+        // TODO: return segments for all vector indexes
+        let default_vectors_index = indexes
+            .vectors_indexes
+            .get(DEFAULT_VECTORS_INDEX_NAME)
+            .expect("Default vectors index should never be deleted (yet)");
+        segments.insert("vector".to_string(), default_vectors_index.get_segment_ids()?);
+        segments.insert("relation".to_string(), indexes.relations_index.get_segment_ids()?);
 
         Ok(segments)
     }
 
@@ -486,13 +642,18 @@ impl ShardWriter {
         // while we retrieve the list of files
         let indexes = write_rw_lock(&self.indexes);
         let paragraph_files =
-            indexes.paragraph_writer.get_index_files(ignored_segement_ids.get("paragraph").unwrap_or(&Vec::new()))?;
+            indexes.paragraphs_index.get_index_files(ignored_segement_ids.get("paragraph").unwrap_or(&Vec::new()))?;
         let text_files =
-            indexes.text_writer.get_index_files(ignored_segement_ids.get("text").unwrap_or(&Vec::new()))?;
+            indexes.texts_index.get_index_files(ignored_segement_ids.get("text").unwrap_or(&Vec::new()))?;
+        // TODO: return files for all vector indexes
+        let default_vectors_index = indexes
+            .vectors_indexes
+            .get(DEFAULT_VECTORS_INDEX_NAME)
+            .expect("Default vectors index should never be deleted (yet)");
         let vector_files =
-            indexes.vector_writer.get_index_files(ignored_segement_ids.get("vector").unwrap_or(&Vec::new()))?;
+            default_vectors_index.get_index_files(ignored_segement_ids.get("vector").unwrap_or(&Vec::new()))?;
         let relation_files =
-            indexes.relation_writer.get_index_files(ignored_segement_ids.get("relation").unwrap_or(&Vec::new()))?;
+            indexes.relations_index.get_index_files(ignored_segement_ids.get("relation").unwrap_or(&Vec::new()))?;
 
         files.push((PathBuf::from(PARAGRAPHS_DIR), paragraph_files));
         files.push((PathBuf::from(TEXTS_DIR),
text_files)); files.push((PathBuf::from(VECTORS_DIR), vector_files)); @@ -501,6 +662,14 @@ impl ShardWriter { } } +pub struct NewVectorsIndex { + pub shard_id: String, + pub name: String, + pub channel: Channel, + pub similarity: VectorSimilarity, + pub normalize_vectors: bool, +} + pub enum GarbageCollectorStatus { GarbageCollected, TryLater, diff --git a/nucliadb_node/tests/test_date_range_search.rs b/nucliadb_node/tests/test_date_range_search.rs index 9103ad6ce3..47107e4a3c 100644 --- a/nucliadb_node/tests/test_date_range_search.rs +++ b/nucliadb_node/tests/test_date_range_search.rs @@ -28,7 +28,7 @@ use nucliadb_protos::prost_types::Timestamp; use nucliadb_protos::resource::ResourceStatus; use nucliadb_protos::{ IndexMetadata, IndexParagraph, IndexParagraphs, NewShardRequest, ReleaseChannel, Resource, ResourceId, - SearchRequest, TextInformation, Timestamps, VectorSentence, + SearchRequest, TextInformation, Timestamps, VectorSentence, VectorsetSentences, }; use rstest::*; use tonic::Request; @@ -65,7 +65,13 @@ async fn populate(writer: &mut TestNodeWriter, shard_id: String, metadata: Index let paragraph = IndexParagraph { start: 0, end: 0, - sentences, + sentences: sentences.clone(), + vectorsets_sentences: HashMap::from([( + "__default__".to_string(), + VectorsetSentences { + sentences, + }, + )]), field: field_id.clone(), labels: vec![], index: 3, diff --git a/nucliadb_node/tests/test_vector_normalization.rs b/nucliadb_node/tests/test_vector_normalization.rs index 3adb8d42b7..cdf4e36707 100644 --- a/nucliadb_node/tests/test_vector_normalization.rs +++ b/nucliadb_node/tests/test_vector_normalization.rs @@ -85,7 +85,6 @@ async fn test_vector_normalization_shard( assert!(results.vector.is_some()); let vector_results = results.vector.unwrap(); - println!("{vector_results:#?}"); assert_eq!(vector_results.documents.len(), 20); let scores = vector_results.documents.iter().map(|result| result.score).collect::>(); assert!(scores.iter().all(|score| *score == scores[0])); diff --git a/nucliadb_node/tests/test_vectorsets.rs b/nucliadb_node/tests/test_vectorsets.rs new file mode 100644 index 0000000000..da5299b471 --- /dev/null +++ b/nucliadb_node/tests/test_vectorsets.rs @@ -0,0 +1,157 @@ +// Copyright (C) 2021 Bosutech XXI S.L. +// +// nucliadb is offered under the AGPL v3.0 and as commercial software. +// For commercial licensing, contact us at info@nuclia.com. +// +// AGPL: +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . 
+// + +mod common; + +use common::resources as test_resources; +use common::NodeFixture; +use nucliadb_core::protos::op_status::Status; +use nucliadb_core::protos::{NewShardRequest, ReleaseChannel}; +use nucliadb_node::shards::indexes::DEFAULT_VECTORS_INDEX_NAME; +use nucliadb_protos::noderesources::ResourceId; +use nucliadb_protos::noderesources::ShardId; +use nucliadb_protos::noderesources::VectorSetId; +use nucliadb_protos::nodewriter::NewVectorSetRequest; +use nucliadb_protos::utils::VectorSimilarity; +use rstest::*; +use tonic::Request; + +#[rstest] +#[tokio::test] +async fn test_vectorsets( + #[values(ReleaseChannel::Stable, ReleaseChannel::Experimental)] release_channel: ReleaseChannel, +) -> Result<(), Box> { + let mut fixture = NodeFixture::new(); + fixture.with_writer().await?.with_reader().await?; + let mut writer = fixture.writer_client(); + let _reader = fixture.reader_client(); + + let response = writer + .new_shard(Request::new(NewShardRequest { + release_channel: release_channel.into(), + ..Default::default() + })) + .await?; + let shard_id = &response.get_ref().id; + + let response = writer + .list_vector_sets(ShardId { + id: shard_id.clone(), + }) + .await?; + assert_eq!( + response.get_ref().shard, + Some(ShardId { + id: shard_id.clone() + }) + ); + assert_eq!(response.get_ref().vectorsets, vec![DEFAULT_VECTORS_INDEX_NAME.to_string()]); + + let vectorset = "gecko".to_string(); + + let response = writer + .add_vector_set(Request::new(NewVectorSetRequest { + id: Some(VectorSetId { + shard: Some(ShardId { + id: shard_id.clone(), + }), + vectorset: vectorset.clone(), + }), + similarity: VectorSimilarity::Dot.into(), + normalize_vectors: true, + })) + .await?; + assert_eq!(response.get_ref().status(), Status::Ok); + + let response = writer + .list_vector_sets(ShardId { + id: shard_id.clone(), + }) + .await?; + assert_eq!( + response.get_ref().shard, + Some(ShardId { + id: shard_id.clone() + }) + ); + let mut vectorsets = response.get_ref().vectorsets.clone(); + vectorsets.sort(); + let mut expected = vec![DEFAULT_VECTORS_INDEX_NAME.to_string(), vectorset.clone()]; + expected.sort(); + assert_eq!(vectorsets, expected); + + // Work with multiple vectorsets + let resource = test_resources::little_prince(shard_id); + let rid = resource.resource.as_ref().unwrap().uuid.clone(); + + let response = writer.set_resource(resource).await?; + assert_eq!(response.get_ref().status(), Status::Ok); + + let response = writer.set_resource(test_resources::people_and_places(shard_id)).await?; + assert_eq!(response.get_ref().status(), Status::Ok); + + let response = writer + .remove_resource(ResourceId { + shard_id: shard_id.clone(), + uuid: rid.clone(), + }) + .await?; + assert_eq!(response.get_ref().status(), Status::Ok); + + // Removal of the default vectorset is not allowed (yet) + let response = writer + .remove_vector_set(VectorSetId { + shard: Some(ShardId { + id: shard_id.clone(), + }), + vectorset: DEFAULT_VECTORS_INDEX_NAME.to_string(), + }) + .await?; + assert_eq!(response.get_ref().status(), Status::Error); + assert!(response.get_ref().detail.contains("is reserved and can't be removed")); + + // A user-created vectorset can be deleted + let response = writer + .remove_vector_set(VectorSetId { + shard: Some(ShardId { + id: shard_id.clone(), + }), + vectorset: vectorset.clone(), + }) + .await?; + assert_eq!(response.get_ref().status(), Status::Ok); + + let response = writer + .list_vector_sets(ShardId { + id: shard_id.clone(), + }) + .await?; + assert_eq!( + response.get_ref().shard, + 
Some(ShardId { + id: shard_id.clone() + }) + ); + assert_eq!(response.get_ref().vectorsets, vec![DEFAULT_VECTORS_INDEX_NAME.to_string()]); + + // TODO: to be continued + + Ok(()) +} diff --git a/nucliadb_paragraphs2/src/reader.rs b/nucliadb_paragraphs2/src/reader.rs index bd3d970c09..29be2b1f4c 100644 --- a/nucliadb_paragraphs2/src/reader.rs +++ b/nucliadb_paragraphs2/src/reader.rs @@ -515,6 +515,7 @@ mod tests { start: 0, end: DOC1_P1.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/e/myentity".to_string()], index: 0, @@ -528,6 +529,7 @@ mod tests { start: DOC1_P1.len() as i32, end: (DOC1_P1.len() + DOC1_P2.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/tantivy".to_string(), "/test".to_string(), "/label1".to_string()], index: 1, @@ -541,6 +543,7 @@ mod tests { start: (DOC1_P1.len() + DOC1_P2.len()) as i32, end: (DOC1_P1.len() + DOC1_P2.len() + DOC1_P3.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/three".to_string(), "/label2".to_string()], index: 2, @@ -564,6 +567,7 @@ mod tests { start: 0, end: DOC1_TI.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "title".to_string(), labels: vec!["/c/ool".to_string()], index: 3, diff --git a/nucliadb_paragraphs2/src/writer.rs b/nucliadb_paragraphs2/src/writer.rs index deba0d96c5..278edd8183 100644 --- a/nucliadb_paragraphs2/src/writer.rs +++ b/nucliadb_paragraphs2/src/writer.rs @@ -365,6 +365,7 @@ mod tests { start: 0, end: DOC1_P1.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/nsfw".to_string()], index: 0, @@ -378,6 +379,7 @@ mod tests { start: DOC1_P1.len() as i32, end: (DOC1_P1.len() + DOC1_P2.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/tantivy".to_string(), "/test".to_string(), "/label1".to_string()], index: 1, @@ -391,6 +393,7 @@ mod tests { start: (DOC1_P1.len() + DOC1_P2.len()) as i32, end: (DOC1_P1.len() + DOC1_P2.len() + DOC1_P3.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/three".to_string(), "/label2".to_string()], index: 2, @@ -414,6 +417,7 @@ mod tests { start: 0, end: DOC1_TI.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "title".to_string(), labels: vec!["/cool".to_string()], index: 3, diff --git a/nucliadb_paragraphs3/src/reader.rs b/nucliadb_paragraphs3/src/reader.rs index 63982bea38..017453c71a 100644 --- a/nucliadb_paragraphs3/src/reader.rs +++ b/nucliadb_paragraphs3/src/reader.rs @@ -515,6 +515,7 @@ mod tests { start: 0, end: DOC1_P1.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/e/myentity".to_string()], index: 0, @@ -528,6 +529,7 @@ mod tests { start: DOC1_P1.len() as i32, end: (DOC1_P1.len() + DOC1_P2.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/tantivy".to_string(), "/test".to_string(), "/label1".to_string()], index: 1, @@ -541,6 +543,7 @@ mod tests { start: (DOC1_P1.len() + DOC1_P2.len()) as i32, end: (DOC1_P1.len() + DOC1_P2.len() + DOC1_P3.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: 
"body".to_string(), labels: vec!["/three".to_string(), "/label2".to_string()], index: 2, @@ -564,6 +567,7 @@ mod tests { start: 0, end: DOC1_TI.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "title".to_string(), labels: vec!["/c/ool".to_string()], index: 3, diff --git a/nucliadb_paragraphs3/src/writer.rs b/nucliadb_paragraphs3/src/writer.rs index 7dfa54f094..1473a9c948 100644 --- a/nucliadb_paragraphs3/src/writer.rs +++ b/nucliadb_paragraphs3/src/writer.rs @@ -345,6 +345,7 @@ mod tests { start: 0, end: DOC1_P1.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/nsfw".to_string()], index: 0, @@ -358,6 +359,7 @@ mod tests { start: DOC1_P1.len() as i32, end: (DOC1_P1.len() + DOC1_P2.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/tantivy".to_string(), "/test".to_string(), "/label1".to_string()], index: 1, @@ -371,6 +373,7 @@ mod tests { start: (DOC1_P1.len() + DOC1_P2.len()) as i32, end: (DOC1_P1.len() + DOC1_P2.len() + DOC1_P3.len()) as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "body".to_string(), labels: vec!["/three".to_string(), "/label2".to_string()], index: 2, @@ -394,6 +397,7 @@ mod tests { start: 0, end: DOC1_TI.len() as i32, sentences: HashMap::new(), + vectorsets_sentences: HashMap::new(), field: "title".to_string(), labels: vec!["/cool".to_string()], index: 3, diff --git a/nucliadb_protos/noderesources.proto b/nucliadb_protos/noderesources.proto index b4bea96e45..7eae30f924 100644 --- a/nucliadb_protos/noderesources.proto +++ b/nucliadb_protos/noderesources.proto @@ -99,6 +99,10 @@ message VectorSentence { SentenceMetadata metadata = 9; } +message VectorsetSentences { + map sentences = 1; // key is full id for vectors +} + message ParagraphMetadata { Position position = 1; bool page_with_visual = 2; @@ -110,6 +114,7 @@ message IndexParagraph { int32 end = 2; // Start end position in field text repeated string labels = 3; // Paragraph specific labels map sentences = 4; // key is full id for vectors + map vectorsets_sentences = 10; // key is vectorset id string field = 5; string split = 6; // split were it belongs uint64 index = 7; @@ -124,7 +129,7 @@ message VectorSetID { message VectorSetList { ShardId shard = 1; - repeated string vectorset = 2; + repeated string vectorsets = 2; } message IndexParagraphs { @@ -166,8 +171,8 @@ message Resource { string shard_id = 11; - map vectors = 12; // vectorset is the key - map vectors_to_delete = 13; // Vectorset prefix vector id + map vectors = 12 [deprecated = true]; // vectorset is the key + map vectors_to_delete = 13 [deprecated = true]; // Vectorset prefix vector id optional utils.Security security = 14; } diff --git a/nucliadb_protos/nodewriter.proto b/nucliadb_protos/nodewriter.proto index 8d3cdc8e89..9ef231799f 100644 --- a/nucliadb_protos/nodewriter.proto +++ b/nucliadb_protos/nodewriter.proto @@ -64,6 +64,8 @@ message NewShardRequest { message NewVectorSetRequest { noderesources.VectorSetID id = 1; utils.VectorSimilarity similarity = 2; + // indicates whether the shard should normalize vectors on indexing or not + bool normalize_vectors = 3; } message MergeResponse { diff --git a/nucliadb_protos/python/nucliadb_protos/audit_pb2.py b/nucliadb_protos/python/nucliadb_protos/audit_pb2.py index c9ecacee36..f31d52e802 100644 --- a/nucliadb_protos/python/nucliadb_protos/audit_pb2.py +++ 
b/nucliadb_protos/python/nucliadb_protos/audit_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/audit.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/audit_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/audit_pb2.pyi index e2985fef12..ea46068959 100644 --- a/nucliadb_protos/python/nucliadb_protos/audit_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/audit_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -45,7 +44,7 @@ DASHBOARD: ClientType.ValueType # 4 CHROME_EXTENSION: ClientType.ValueType # 5 global___ClientType = ClientType -@typing.final +@typing_extensions.final class AuditField(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -53,7 +52,7 @@ class AuditField(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FieldActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AuditField._FieldAction.ValueType], builtins.type): + class _FieldActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AuditField._FieldAction.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ADDED: AuditField._FieldAction.ValueType # 0 MODIFIED: AuditField._FieldAction.ValueType # 1 @@ -87,11 +86,11 @@ class AuditField(google.protobuf.message.Message): field_type: nucliadb_protos.resources_pb2.FieldType.ValueType = ..., filename: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "field_id", b"field_id", "field_type", b"field_type", "filename", b"filename", "size", b"size", "size_delta", b"size_delta"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["action", b"action", "field_id", b"field_id", "field_type", b"field_type", "filename", b"filename", "size", b"size", "size_delta", b"size_delta"]) -> None: ... global___AuditField = AuditField -@typing.final +@typing_extensions.final class AuditKBCounter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -105,11 +104,11 @@ class AuditKBCounter(google.protobuf.message.Message): paragraphs: builtins.int = ..., fields: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["fields", b"fields", "paragraphs", b"paragraphs"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "paragraphs", b"paragraphs"]) -> None: ... global___AuditKBCounter = AuditKBCounter -@typing.final +@typing_extensions.final class ChatContext(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -123,11 +122,11 @@ class ChatContext(google.protobuf.message.Message): author: builtins.str = ..., text: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["author", b"author", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["author", b"author", "text", b"text"]) -> None: ... 
global___ChatContext = ChatContext -@typing.final +@typing_extensions.final class ChatAudit(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -139,9 +138,9 @@ class ChatAudit(google.protobuf.message.Message): question: builtins.str answer: builtins.str rephrased_question: builtins.str - learning_id: builtins.str @property def context(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ChatContext]: ... + learning_id: builtins.str def __init__( self, *, @@ -151,16 +150,16 @@ class ChatAudit(google.protobuf.message.Message): context: collections.abc.Iterable[global___ChatContext] | None = ..., learning_id: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_answer", b"_answer", "_rephrased_question", b"_rephrased_question", "answer", b"answer", "rephrased_question", b"rephrased_question"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_answer", b"_answer", "_rephrased_question", b"_rephrased_question", "answer", b"answer", "context", b"context", "learning_id", b"learning_id", "question", b"question", "rephrased_question", b"rephrased_question"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_answer", b"_answer", "_rephrased_question", b"_rephrased_question", "answer", b"answer", "rephrased_question", b"rephrased_question"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_answer", b"_answer", "_rephrased_question", b"_rephrased_question", "answer", b"answer", "context", b"context", "learning_id", b"learning_id", "question", b"question", "rephrased_question", b"rephrased_question"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_answer", b"_answer"]) -> typing.Literal["answer"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_answer", b"_answer"]) -> typing_extensions.Literal["answer"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_rephrased_question", b"_rephrased_question"]) -> typing.Literal["rephrased_question"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_rephrased_question", b"_rephrased_question"]) -> typing_extensions.Literal["rephrased_question"] | None: ... 
global___ChatAudit = ChatAudit -@typing.final +@typing_extensions.final class AuditRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -168,7 +167,7 @@ class AuditRequest(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _AuditTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AuditRequest._AuditType.ValueType], builtins.type): + class _AuditTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AuditRequest._AuditType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor VISITED: AuditRequest._AuditType.ValueType # 0 MODIFIED: AuditRequest._AuditType.ValueType # 1 @@ -218,28 +217,28 @@ class AuditRequest(google.protobuf.message.Message): type: global___AuditRequest.AuditType.ValueType kbid: builtins.str userid: builtins.str - timeit: builtins.float - origin: builtins.str - rid: builtins.str - task: builtins.str - resources: builtins.int - client_type: global___ClientType.ValueType - trace_id: builtins.str - success: builtins.bool @property def time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property def fields(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def search(self) -> nucliadb_protos.nodereader_pb2.SearchRequest: ... + timeit: builtins.float + origin: builtins.str + rid: builtins.str + task: builtins.str + resources: builtins.int @property def field_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.FieldID]: ... @property def fields_audit(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AuditField]: ... + client_type: global___ClientType.ValueType + trace_id: builtins.str @property def kb_counter(self) -> global___AuditKBCounter: ... @property def chat(self) -> global___ChatAudit: ... + success: builtins.bool def __init__( self, *, @@ -262,7 +261,7 @@ class AuditRequest(google.protobuf.message.Message): chat: global___ChatAudit | None = ..., success: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["chat", b"chat", "kb_counter", b"kb_counter", "search", b"search", "time", b"time"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["chat", b"chat", "client_type", b"client_type", "field_metadata", b"field_metadata", "fields", b"fields", "fields_audit", b"fields_audit", "kb_counter", b"kb_counter", "kbid", b"kbid", "origin", b"origin", "resources", b"resources", "rid", b"rid", "search", b"search", "success", b"success", "task", b"task", "time", b"time", "timeit", b"timeit", "trace_id", b"trace_id", "type", b"type", "userid", b"userid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["chat", b"chat", "kb_counter", b"kb_counter", "search", b"search", "time", b"time"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["chat", b"chat", "client_type", b"client_type", "field_metadata", b"field_metadata", "fields", b"fields", "fields_audit", b"fields_audit", "kb_counter", b"kb_counter", "kbid", b"kbid", "origin", b"origin", "resources", b"resources", "rid", b"rid", "search", b"search", "success", b"success", "task", b"task", "time", b"time", "timeit", b"timeit", "trace_id", b"trace_id", "type", b"type", "userid", b"userid"]) -> None: ... 
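For the nodewriter.proto change above (NewVectorSetRequest gains a normalize_vectors flag), a hedged example of how a caller might build the request from the regenerated bindings; the shard uuid and vectorset name are placeholders:

    # Sketch only: building the extended NewVectorSetRequest.
    from nucliadb_protos.noderesources_pb2 import ShardId, VectorSetID
    from nucliadb_protos.nodewriter_pb2 import NewVectorSetRequest
    from nucliadb_protos.utils_pb2 import VectorSimilarity

    request = NewVectorSetRequest(
        id=VectorSetID(shard=ShardId(id="shard-uuid"), vectorset="my-vectorset"),
        similarity=VectorSimilarity.COSINE,
        normalize_vectors=True,  # new field 3: normalize vectors at indexing time
    )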
global___AuditRequest = AuditRequest diff --git a/nucliadb_protos/python/nucliadb_protos/dataset_pb2.py b/nucliadb_protos/python/nucliadb_protos/dataset_pb2.py index f553efa5b4..eb3ead4321 100644 --- a/nucliadb_protos/python/nucliadb_protos/dataset_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/dataset_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/dataset.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/dataset_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/dataset_pb2.pyi index 3d8c44af4c..e93af7a462 100644 --- a/nucliadb_protos/python/nucliadb_protos/dataset_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/dataset_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -62,11 +61,11 @@ FIELD: LabelFrom.ValueType # 1 RESOURCE: LabelFrom.ValueType # 2 global___LabelFrom = LabelFrom -@typing.final +@typing_extensions.final class TrainSet(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class Filter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -78,15 +77,15 @@ class TrainSet(google.protobuf.message.Message): *, labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labels", b"labels"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labels", b"labels"]) -> None: ... TYPE_FIELD_NUMBER: builtins.int FILTER_FIELD_NUMBER: builtins.int BATCH_SIZE_FIELD_NUMBER: builtins.int type: global___TaskType.ValueType - batch_size: builtins.int @property def filter(self) -> global___TrainSet.Filter: ... + batch_size: builtins.int def __init__( self, *, @@ -94,12 +93,12 @@ class TrainSet(google.protobuf.message.Message): filter: global___TrainSet.Filter | None = ..., batch_size: builtins.int = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["filter", b"filter"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["batch_size", b"batch_size", "filter", b"filter", "type", b"type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["filter", b"filter"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_size", b"batch_size", "filter", b"filter", "type", b"type"]) -> None: ... global___TrainSet = TrainSet -@typing.final +@typing_extensions.final class Label(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -116,11 +115,11 @@ class Label(google.protobuf.message.Message): label: builtins.str = ..., origin: global___LabelFrom.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["label", b"label", "labelset", b"labelset", "origin", b"origin"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["label", b"label", "labelset", b"labelset", "origin", b"origin"]) -> None: ... 
global___Label = Label -@typing.final +@typing_extensions.final class TextLabel(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -135,11 +134,11 @@ class TextLabel(google.protobuf.message.Message): text: builtins.str = ..., labels: collections.abc.Iterable[global___Label] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labels", b"labels", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labels", b"labels", "text", b"text"]) -> None: ... global___TextLabel = TextLabel -@typing.final +@typing_extensions.final class MultipleTextSameLabels(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -155,11 +154,11 @@ class MultipleTextSameLabels(google.protobuf.message.Message): text: collections.abc.Iterable[builtins.str] | None = ..., labels: collections.abc.Iterable[global___Label] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labels", b"labels", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labels", b"labels", "text", b"text"]) -> None: ... global___MultipleTextSameLabels = MultipleTextSameLabels -@typing.final +@typing_extensions.final class FieldClassificationBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -171,11 +170,11 @@ class FieldClassificationBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___TextLabel] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___FieldClassificationBatch = FieldClassificationBatch -@typing.final +@typing_extensions.final class ParagraphClassificationBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -187,11 +186,11 @@ class ParagraphClassificationBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___TextLabel] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___ParagraphClassificationBatch = ParagraphClassificationBatch -@typing.final +@typing_extensions.final class SentenceClassificationBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -203,11 +202,11 @@ class SentenceClassificationBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___MultipleTextSameLabels] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___SentenceClassificationBatch = SentenceClassificationBatch -@typing.final +@typing_extensions.final class TokensClassification(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -223,11 +222,11 @@ class TokensClassification(google.protobuf.message.Message): token: collections.abc.Iterable[builtins.str] | None = ..., label: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["label", b"label", "token", b"token"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["label", b"label", "token", b"token"]) -> None: ... 
global___TokensClassification = TokensClassification -@typing.final +@typing_extensions.final class TokenClassificationBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -239,11 +238,11 @@ class TokenClassificationBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___TokensClassification] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___TokenClassificationBatch = TokenClassificationBatch -@typing.final +@typing_extensions.final class ImageClassification(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -257,11 +256,11 @@ class ImageClassification(google.protobuf.message.Message): selections: builtins.str = ..., page_uri: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["page_uri", b"page_uri", "selections", b"selections"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["page_uri", b"page_uri", "selections", b"selections"]) -> None: ... global___ImageClassification = ImageClassification -@typing.final +@typing_extensions.final class ImageClassificationBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -273,11 +272,11 @@ class ImageClassificationBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___ImageClassification] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___ImageClassificationBatch = ImageClassificationBatch -@typing.final +@typing_extensions.final class ParagraphStreamItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -291,11 +290,11 @@ class ParagraphStreamItem(google.protobuf.message.Message): id: builtins.str = ..., text: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "text", b"text"]) -> None: ... global___ParagraphStreamItem = ParagraphStreamItem -@typing.final +@typing_extensions.final class ParagraphStreamingBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -307,11 +306,11 @@ class ParagraphStreamingBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___ParagraphStreamItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___ParagraphStreamingBatch = ParagraphStreamingBatch -@typing.final +@typing_extensions.final class Question(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -329,11 +328,11 @@ class Question(google.protobuf.message.Message): language: builtins.str = ..., paragraphs: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["language", b"language", "paragraphs", b"paragraphs", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["language", b"language", "paragraphs", b"paragraphs", "text", b"text"]) -> None: ... 
global___Question = Question -@typing.final +@typing_extensions.final class Answer(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -351,22 +350,22 @@ class Answer(google.protobuf.message.Message): language: builtins.str = ..., paragraphs: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["language", b"language", "paragraphs", b"paragraphs", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["language", b"language", "paragraphs", b"paragraphs", "text", b"text"]) -> None: ... global___Answer = Answer -@typing.final +@typing_extensions.final class QuestionAnswerStreamItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor QUESTION_FIELD_NUMBER: builtins.int ANSWER_FIELD_NUMBER: builtins.int CANCELLED_BY_USER_FIELD_NUMBER: builtins.int - cancelled_by_user: builtins.bool @property def question(self) -> global___Question: ... @property def answer(self) -> global___Answer: ... + cancelled_by_user: builtins.bool def __init__( self, *, @@ -374,12 +373,12 @@ class QuestionAnswerStreamItem(google.protobuf.message.Message): answer: global___Answer | None = ..., cancelled_by_user: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["answer", b"answer", "question", b"question"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["answer", b"answer", "cancelled_by_user", b"cancelled_by_user", "question", b"question"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["answer", b"answer", "question", b"question"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["answer", b"answer", "cancelled_by_user", b"cancelled_by_user", "question", b"question"]) -> None: ... global___QuestionAnswerStreamItem = QuestionAnswerStreamItem -@typing.final +@typing_extensions.final class QuestionAnswerStreamingBatch(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -391,6 +390,6 @@ class QuestionAnswerStreamingBatch(google.protobuf.message.Message): *, data: collections.abc.Iterable[global___QuestionAnswerStreamItem] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___QuestionAnswerStreamingBatch = QuestionAnswerStreamingBatch diff --git a/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.py b/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.py index daf351b053..7afbe166e3 100644 --- a/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/knowledgebox.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.pyi index 40454a96a6..35ae3b6026 100644 --- a/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/knowledgebox_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -59,7 +58,7 @@ NOTFOUND: KnowledgeBoxResponseStatus.ValueType # 2 ERROR: KnowledgeBoxResponseStatus.ValueType # 3 global___KnowledgeBoxResponseStatus = KnowledgeBoxResponseStatus -@typing.final +@typing_extensions.final class KnowledgeBoxID(google.protobuf.message.Message): """ID""" @@ -75,11 +74,11 @@ class KnowledgeBoxID(google.protobuf.message.Message): slug: builtins.str = ..., uuid: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["slug", b"slug", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["slug", b"slug", "uuid", b"uuid"]) -> None: ... global___KnowledgeBoxID = KnowledgeBoxID -@typing.final +@typing_extensions.final class KnowledgeBoxConfig(google.protobuf.message.Message): """CONFIG""" @@ -95,15 +94,15 @@ class KnowledgeBoxConfig(google.protobuf.message.Message): RELEASE_CHANNEL_FIELD_NUMBER: builtins.int title: builtins.str description: builtins.str + @property + def enabled_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + @property + def enabled_insights(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... slug: builtins.str disable_vectors: builtins.bool migration_version: builtins.int release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType """DEPRECATED: duplicated field also stored in `writer.proto Shards`""" - @property - def enabled_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... - @property - def enabled_insights(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( self, *, @@ -116,11 +115,11 @@ class KnowledgeBoxConfig(google.protobuf.message.Message): migration_version: builtins.int = ..., release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["description", b"description", "disable_vectors", b"disable_vectors", "enabled_filters", b"enabled_filters", "enabled_insights", b"enabled_insights", "migration_version", b"migration_version", "release_channel", b"release_channel", "slug", b"slug", "title", b"title"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "disable_vectors", b"disable_vectors", "enabled_filters", b"enabled_filters", "enabled_insights", b"enabled_insights", "migration_version", b"migration_version", "release_channel", b"release_channel", "slug", b"slug", "title", b"title"]) -> None: ... global___KnowledgeBoxConfig = KnowledgeBoxConfig -@typing.final +@typing_extensions.final class KnowledgeBoxNew(google.protobuf.message.Message): """NEW""" @@ -136,11 +135,15 @@ class KnowledgeBoxNew(google.protobuf.message.Message): LEARNING_CONFIG_FIELD_NUMBER: builtins.int RELEASE_CHANNEL_FIELD_NUMBER: builtins.int slug: builtins.str + @property + def config(self) -> global___KnowledgeBoxConfig: ... forceuuid: builtins.str """this fields are only set by backend when creating hosted KBs""" similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType vector_dimension: builtins.int default_min_score: builtins.float + @property + def matryoshka_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... learning_config: builtins.str """this field are only used by NucliaDB Writer API when creating a KB. 
Used in onprem scenarios @@ -149,10 +152,6 @@ class KnowledgeBoxNew(google.protobuf.message.Message): """release channel, although not used when backend creates hosted KBs, it's recomputed and changed depending on the environment """ - @property - def config(self) -> global___KnowledgeBoxConfig: ... - @property - def matryoshka_dimensions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( self, *, @@ -166,16 +165,16 @@ class KnowledgeBoxNew(google.protobuf.message.Message): learning_config: builtins.str = ..., release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "config", b"config", "default_min_score", b"default_min_score", "vector_dimension", b"vector_dimension"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "config", b"config", "default_min_score", b"default_min_score", "forceuuid", b"forceuuid", "learning_config", b"learning_config", "matryoshka_dimensions", b"matryoshka_dimensions", "release_channel", b"release_channel", "similarity", b"similarity", "slug", b"slug", "vector_dimension", b"vector_dimension"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "config", b"config", "default_min_score", b"default_min_score", "vector_dimension", b"vector_dimension"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "config", b"config", "default_min_score", b"default_min_score", "forceuuid", b"forceuuid", "learning_config", b"learning_config", "matryoshka_dimensions", b"matryoshka_dimensions", "release_channel", b"release_channel", "similarity", b"similarity", "slug", b"slug", "vector_dimension", b"vector_dimension"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_default_min_score", b"_default_min_score"]) -> typing.Literal["default_min_score"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_default_min_score", b"_default_min_score"]) -> typing_extensions.Literal["default_min_score"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_vector_dimension", b"_vector_dimension"]) -> typing.Literal["vector_dimension"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_vector_dimension", b"_vector_dimension"]) -> typing_extensions.Literal["vector_dimension"] | None: ... global___KnowledgeBoxNew = KnowledgeBoxNew -@typing.final +@typing_extensions.final class NewKnowledgeBoxResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -189,11 +188,11 @@ class NewKnowledgeBoxResponse(google.protobuf.message.Message): status: global___KnowledgeBoxResponseStatus.ValueType = ..., uuid: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status", "uuid", b"uuid"]) -> None: ... 
global___NewKnowledgeBoxResponse = NewKnowledgeBoxResponse -@typing.final +@typing_extensions.final class KnowledgeBoxUpdate(google.protobuf.message.Message): """UPDATE""" @@ -213,12 +212,12 @@ class KnowledgeBoxUpdate(google.protobuf.message.Message): uuid: builtins.str = ..., config: global___KnowledgeBoxConfig | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["config", b"config"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["config", b"config", "slug", b"slug", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["config", b"config"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "slug", b"slug", "uuid", b"uuid"]) -> None: ... global___KnowledgeBoxUpdate = KnowledgeBoxUpdate -@typing.final +@typing_extensions.final class UpdateKnowledgeBoxResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -232,11 +231,11 @@ class UpdateKnowledgeBoxResponse(google.protobuf.message.Message): status: global___KnowledgeBoxResponseStatus.ValueType = ..., uuid: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status", "uuid", b"uuid"]) -> None: ... global___UpdateKnowledgeBoxResponse = UpdateKnowledgeBoxResponse -@typing.final +@typing_extensions.final class GCKnowledgeBoxResponse(google.protobuf.message.Message): """GC""" @@ -248,7 +247,7 @@ class GCKnowledgeBoxResponse(google.protobuf.message.Message): global___GCKnowledgeBoxResponse = GCKnowledgeBoxResponse -@typing.final +@typing_extensions.final class DeleteKnowledgeBoxResponse(google.protobuf.message.Message): """DELETE""" @@ -261,11 +260,11 @@ class DeleteKnowledgeBoxResponse(google.protobuf.message.Message): *, status: global___KnowledgeBoxResponseStatus.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___DeleteKnowledgeBoxResponse = DeleteKnowledgeBoxResponse -@typing.final +@typing_extensions.final class Label(google.protobuf.message.Message): """Labels on a Knowledge Box""" @@ -287,11 +286,11 @@ class Label(google.protobuf.message.Message): text: builtins.str = ..., uri: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["related", b"related", "text", b"text", "title", b"title", "uri", b"uri"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["related", b"related", "text", b"text", "title", b"title", "uri", b"uri"]) -> None: ... 
global___Label = Label -@typing.final +@typing_extensions.final class LabelSet(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -299,7 +298,7 @@ class LabelSet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LabelSetKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LabelSet._LabelSetKind.ValueType], builtins.type): + class _LabelSetKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LabelSet._LabelSetKind.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor RESOURCES: LabelSet._LabelSetKind.ValueType # 0 PARAGRAPHS: LabelSet._LabelSetKind.ValueType # 1 @@ -319,9 +318,9 @@ class LabelSet(google.protobuf.message.Message): KIND_FIELD_NUMBER: builtins.int title: builtins.str color: builtins.str - multiple: builtins.bool @property def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Label]: ... + multiple: builtins.bool @property def kind(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___LabelSet.LabelSetKind.ValueType]: ... def __init__( @@ -333,15 +332,15 @@ class LabelSet(google.protobuf.message.Message): multiple: builtins.bool = ..., kind: collections.abc.Iterable[global___LabelSet.LabelSetKind.ValueType] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["color", b"color", "kind", b"kind", "labels", b"labels", "multiple", b"multiple", "title", b"title"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["color", b"color", "kind", b"kind", "labels", b"labels", "multiple", b"multiple", "title", b"title"]) -> None: ... global___LabelSet = LabelSet -@typing.final +@typing_extensions.final class Labels(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class LabelsetEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -356,8 +355,8 @@ class Labels(google.protobuf.message.Message): key: builtins.str = ..., value: global___LabelSet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... LABELSET_FIELD_NUMBER: builtins.int @property @@ -367,11 +366,11 @@ class Labels(google.protobuf.message.Message): *, labelset: collections.abc.Mapping[builtins.str, global___LabelSet] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labelset", b"labelset"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labelset", b"labelset"]) -> None: ... global___Labels = Labels -@typing.final +@typing_extensions.final class Entity(google.protobuf.message.Message): """Entities on a Knowledge Box""" @@ -382,10 +381,10 @@ class Entity(google.protobuf.message.Message): MERGED_FIELD_NUMBER: builtins.int DELETED_FIELD_NUMBER: builtins.int value: builtins.str - merged: builtins.bool - deleted: builtins.bool @property def represents(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... 
+ merged: builtins.bool + deleted: builtins.bool def __init__( self, *, @@ -394,11 +393,11 @@ class Entity(google.protobuf.message.Message): merged: builtins.bool = ..., deleted: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["deleted", b"deleted", "merged", b"merged", "represents", b"represents", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["deleted", b"deleted", "merged", b"merged", "represents", b"represents", "value", b"value"]) -> None: ... global___Entity = Entity -@typing.final +@typing_extensions.final class EntitiesGroupSummary(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -415,15 +414,15 @@ class EntitiesGroupSummary(google.protobuf.message.Message): color: builtins.str = ..., custom: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["color", b"color", "custom", b"custom", "title", b"title"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["color", b"color", "custom", b"custom", "title", b"title"]) -> None: ... global___EntitiesGroupSummary = EntitiesGroupSummary -@typing.final +@typing_extensions.final class EntitiesGroup(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntitiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -438,18 +437,18 @@ class EntitiesGroup(google.protobuf.message.Message): key: builtins.str = ..., value: global___Entity | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ENTITIES_FIELD_NUMBER: builtins.int TITLE_FIELD_NUMBER: builtins.int COLOR_FIELD_NUMBER: builtins.int CUSTOM_FIELD_NUMBER: builtins.int + @property + def entities(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Entity]: ... title: builtins.str color: builtins.str custom: builtins.bool - @property - def entities(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Entity]: ... def __init__( self, *, @@ -458,11 +457,11 @@ class EntitiesGroup(google.protobuf.message.Message): color: builtins.str = ..., custom: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["color", b"color", "custom", b"custom", "entities", b"entities", "title", b"title"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["color", b"color", "custom", b"custom", "entities", b"entities", "title", b"title"]) -> None: ... global___EntitiesGroup = EntitiesGroup -@typing.final +@typing_extensions.final class DeletedEntitiesGroups(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -474,15 +473,15 @@ class DeletedEntitiesGroups(google.protobuf.message.Message): *, entities_groups: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities_groups", b"entities_groups"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities_groups", b"entities_groups"]) -> None: ... 
global___DeletedEntitiesGroups = DeletedEntitiesGroups -@typing.final +@typing_extensions.final class EntitiesGroups(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntitiesGroupsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -497,8 +496,8 @@ class EntitiesGroups(google.protobuf.message.Message): key: builtins.str = ..., value: global___EntitiesGroup | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ENTITIES_GROUPS_FIELD_NUMBER: builtins.int @property @@ -508,11 +507,11 @@ class EntitiesGroups(google.protobuf.message.Message): *, entities_groups: collections.abc.Mapping[builtins.str, global___EntitiesGroup] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities_groups", b"entities_groups"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities_groups", b"entities_groups"]) -> None: ... global___EntitiesGroups = EntitiesGroups -@typing.final +@typing_extensions.final class EntityGroupDuplicateIndex(google.protobuf.message.Message): """ Structure to represent all duplicates defined in a kb @@ -522,7 +521,7 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntityDuplicates(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -534,13 +533,13 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): *, duplicates: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["duplicates", b"duplicates"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["duplicates", b"duplicates"]) -> None: ... - @typing.final + @typing_extensions.final class EntityGroupDuplicates(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntitiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -555,8 +554,8 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): key: builtins.str = ..., value: global___EntityGroupDuplicateIndex.EntityDuplicates | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ENTITIES_FIELD_NUMBER: builtins.int @property @@ -566,9 +565,9 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): *, entities: collections.abc.Mapping[builtins.str, global___EntityGroupDuplicateIndex.EntityDuplicates] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities"]) -> None: ... - @typing.final + @typing_extensions.final class EntitiesGroupsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -583,8 +582,8 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): key: builtins.str = ..., value: global___EntityGroupDuplicateIndex.EntityGroupDuplicates | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ENTITIES_GROUPS_FIELD_NUMBER: builtins.int @property @@ -594,11 +593,11 @@ class EntityGroupDuplicateIndex(google.protobuf.message.Message): *, entities_groups: collections.abc.Mapping[builtins.str, global___EntityGroupDuplicateIndex.EntityGroupDuplicates] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities_groups", b"entities_groups"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities_groups", b"entities_groups"]) -> None: ... global___EntityGroupDuplicateIndex = EntityGroupDuplicateIndex -@typing.final +@typing_extensions.final class VectorSet(google.protobuf.message.Message): """Vectorsets""" @@ -614,15 +613,15 @@ class VectorSet(google.protobuf.message.Message): dimension: builtins.int = ..., similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "similarity", b"similarity"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["dimension", b"dimension", "similarity", b"similarity"]) -> None: ... global___VectorSet = VectorSet -@typing.final +@typing_extensions.final class VectorSets(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class VectorsetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -637,8 +636,8 @@ class VectorSets(google.protobuf.message.Message): key: builtins.str = ..., value: global___VectorSet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... VECTORSETS_FIELD_NUMBER: builtins.int @property @@ -648,11 +647,11 @@ class VectorSets(google.protobuf.message.Message): *, vectorsets: collections.abc.Mapping[builtins.str, global___VectorSet] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["vectorsets", b"vectorsets"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["vectorsets", b"vectorsets"]) -> None: ... 
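VectorSetList in noderesources.proto now exposes its repeated field as vectorsets instead of vectorset. A small sketch of producing and consuming the renamed field; the shard id and vectorset names are placeholders:

    # Sketch only: the repeated field on VectorSetList is now `vectorsets` (field 2).
    from nucliadb_protos.noderesources_pb2 import ShardId, VectorSetList

    response = VectorSetList(shard=ShardId(id="shard-uuid"))
    response.vectorsets.append("__default__")
    response.vectorsets.append("my-vectorset")

    for name in response.vectorsets:
        print(name)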
global___VectorSets = VectorSets -@typing.final +@typing_extensions.final class TermSynonyms(google.protobuf.message.Message): """Synonyms of a Knowledge Box""" @@ -666,15 +665,15 @@ class TermSynonyms(google.protobuf.message.Message): *, synonyms: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["synonyms", b"synonyms"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["synonyms", b"synonyms"]) -> None: ... global___TermSynonyms = TermSynonyms -@typing.final +@typing_extensions.final class Synonyms(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class TermsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -689,8 +688,8 @@ class Synonyms(google.protobuf.message.Message): key: builtins.str = ..., value: global___TermSynonyms | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... TERMS_FIELD_NUMBER: builtins.int @property @@ -700,11 +699,11 @@ class Synonyms(google.protobuf.message.Message): *, terms: collections.abc.Mapping[builtins.str, global___TermSynonyms] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["terms", b"terms"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["terms", b"terms"]) -> None: ... global___Synonyms = Synonyms -@typing.final +@typing_extensions.final class SemanticModelMetadata(google.protobuf.message.Message): """Metadata of the model associated to the KB""" @@ -722,7 +721,6 @@ class SemanticModelMetadata(google.protobuf.message.Message): """list of possible subdivisions of the matryoshka embeddings (if the model supports it) """ - def __init__( self, *, @@ -731,16 +729,16 @@ class SemanticModelMetadata(google.protobuf.message.Message): default_min_score: builtins.float | None = ..., matryoshka_dimensions: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "default_min_score", b"default_min_score", "vector_dimension", b"vector_dimension"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "default_min_score", b"default_min_score", "matryoshka_dimensions", b"matryoshka_dimensions", "similarity_function", b"similarity_function", "vector_dimension", b"vector_dimension"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "default_min_score", b"default_min_score", "vector_dimension", b"vector_dimension"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_default_min_score", b"_default_min_score", "_vector_dimension", b"_vector_dimension", "default_min_score", b"default_min_score", "matryoshka_dimensions", b"matryoshka_dimensions", "similarity_function", b"similarity_function", "vector_dimension", b"vector_dimension"]) -> None: ... 
@typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_default_min_score", b"_default_min_score"]) -> typing.Literal["default_min_score"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_default_min_score", b"_default_min_score"]) -> typing_extensions.Literal["default_min_score"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_vector_dimension", b"_vector_dimension"]) -> typing.Literal["vector_dimension"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_vector_dimension", b"_vector_dimension"]) -> typing_extensions.Literal["vector_dimension"] | None: ... global___SemanticModelMetadata = SemanticModelMetadata -@typing.final +@typing_extensions.final class KBConfiguration(google.protobuf.message.Message): """Do not update this model without confirmation of internal Learning Config API @@ -768,6 +766,6 @@ class KBConfiguration(google.protobuf.message.Message): anonymization_model: builtins.str = ..., visual_labeling: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["anonymization_model", b"anonymization_model", "generative_model", b"generative_model", "ner_model", b"ner_model", "semantic_model", b"semantic_model", "visual_labeling", b"visual_labeling"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["anonymization_model", b"anonymization_model", "generative_model", b"generative_model", "ner_model", b"ner_model", "semantic_model", b"semantic_model", "visual_labeling", b"visual_labeling"]) -> None: ... global___KBConfiguration = KBConfiguration diff --git a/nucliadb_protos/python/nucliadb_protos/migrations_pb2.py b/nucliadb_protos/python/nucliadb_protos/migrations_pb2.py index 5e6365ecac..6c2ec2dbd7 100644 --- a/nucliadb_protos/python/nucliadb_protos/migrations_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/migrations_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/migrations.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/migrations_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/migrations_pb2.pyi index 62f5d21416..1b23f5fdbf 100644 --- a/nucliadb_protos/python/nucliadb_protos/migrations_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/migrations_pb2.pyi @@ -2,15 +2,19 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import google.protobuf.descriptor import google.protobuf.message -import typing +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class MigrationInfo(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -24,6 +28,6 @@ class MigrationInfo(google.protobuf.message.Message): current_version: builtins.int = ..., target_version: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["current_version", b"current_version", "target_version", b"target_version"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["current_version", b"current_version", "target_version", b"target_version"]) -> None: ... 
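The regenerated .pyi stubs rely on the version-gated typing_extensions import shown in the migrations_pb2.pyi hunk above; the same pattern reproduced standalone, with an illustrative class that is not part of the generated code:

    # Sketch of the compatibility shim: on Python >= 3.8, `typing` already
    # provides `final`; older interpreters fall back to typing_extensions.
    import sys

    if sys.version_info >= (3, 8):
        import typing as typing_extensions
    else:
        import typing_extensions

    @typing_extensions.final
    class MigrationInfoLike:  # illustrative only
        current_version: int = 0
        target_version: int = 0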
global___MigrationInfo = MigrationInfo diff --git a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.py b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.py index 6640d68a73..dfae25bd7c 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/nodereader.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.pyi index 406e68e8c0..b38ba98f32 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -41,6 +40,7 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) from nucliadb_protos.utils_pb2 import ( COSINE as COSINE, @@ -80,19 +80,19 @@ ENTITIES: SuggestFeatures.ValueType # 0 PARAGRAPHS: SuggestFeatures.ValueType # 1 global___SuggestFeatures = SuggestFeatures -@typing.final +@typing_extensions.final class Filter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor FIELD_LABELS_FIELD_NUMBER: builtins.int PARAGRAPH_LABELS_FIELD_NUMBER: builtins.int EXPRESSION_FIELD_NUMBER: builtins.int - expression: builtins.str - """JSON string with the filter expression""" @property def field_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def paragraph_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + expression: builtins.str + """JSON string with the filter expression""" def __init__( self, *, @@ -100,11 +100,11 @@ class Filter(google.protobuf.message.Message): paragraph_labels: collections.abc.Iterable[builtins.str] | None = ..., expression: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["expression", b"expression", "field_labels", b"field_labels", "paragraph_labels", b"paragraph_labels"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["expression", b"expression", "field_labels", b"field_labels", "paragraph_labels", b"paragraph_labels"]) -> None: ... 
global___Filter = Filter -@typing.final +@typing_extensions.final class StreamFilter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -112,7 +112,7 @@ class StreamFilter(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ConjunctionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[StreamFilter._Conjunction.ValueType], builtins.type): + class _ConjunctionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[StreamFilter._Conjunction.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AND: StreamFilter._Conjunction.ValueType # 0 OR: StreamFilter._Conjunction.ValueType # 1 @@ -134,11 +134,11 @@ class StreamFilter(google.protobuf.message.Message): conjunction: global___StreamFilter.Conjunction.ValueType = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["conjunction", b"conjunction", "labels", b"labels"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["conjunction", b"conjunction", "labels", b"labels"]) -> None: ... global___StreamFilter = StreamFilter -@typing.final +@typing_extensions.final class Faceted(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -150,11 +150,11 @@ class Faceted(google.protobuf.message.Message): *, labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labels", b"labels"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labels", b"labels"]) -> None: ... global___Faceted = Faceted -@typing.final +@typing_extensions.final class OrderBy(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -162,7 +162,7 @@ class OrderBy(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OrderTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OrderBy._OrderType.ValueType], builtins.type): + class _OrderTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OrderBy._OrderType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DESC: OrderBy._OrderType.ValueType # 0 ASC: OrderBy._OrderType.ValueType # 1 @@ -175,7 +175,7 @@ class OrderBy(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _OrderFieldEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OrderBy._OrderField.ValueType], builtins.type): + class _OrderFieldEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OrderBy._OrderField.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CREATED: OrderBy._OrderField.ValueType # 0 MODIFIED: OrderBy._OrderField.ValueType # 1 @@ -197,11 +197,11 @@ class OrderBy(google.protobuf.message.Message): type: global___OrderBy.OrderType.ValueType = ..., sort_by: global___OrderBy.OrderField.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "sort_by", b"sort_by", "type", b"type"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "sort_by", b"sort_by", "type", b"type"]) -> None: ... global___OrderBy = OrderBy -@typing.final +@typing_extensions.final class Timestamps(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -225,12 +225,12 @@ class Timestamps(google.protobuf.message.Message): from_created: google.protobuf.timestamp_pb2.Timestamp | None = ..., to_created: google.protobuf.timestamp_pb2.Timestamp | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["from_created", b"from_created", "from_modified", b"from_modified", "to_created", b"to_created", "to_modified", b"to_modified"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["from_created", b"from_created", "from_modified", b"from_modified", "to_created", b"to_created", "to_modified", b"to_modified"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["from_created", b"from_created", "from_modified", b"from_modified", "to_created", b"to_created", "to_modified", b"to_modified"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["from_created", b"from_created", "from_modified", b"from_modified", "to_created", b"to_created", "to_modified", b"to_modified"]) -> None: ... global___Timestamps = Timestamps -@typing.final +@typing_extensions.final class FacetResult(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -244,11 +244,11 @@ class FacetResult(google.protobuf.message.Message): tag: builtins.str = ..., total: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["tag", b"tag", "total", b"total"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["tag", b"tag", "total", b"total"]) -> None: ... global___FacetResult = FacetResult -@typing.final +@typing_extensions.final class FacetResults(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -260,11 +260,11 @@ class FacetResults(google.protobuf.message.Message): *, facetresults: collections.abc.Iterable[global___FacetResult] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["facetresults", b"facetresults"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["facetresults", b"facetresults"]) -> None: ... global___FacetResults = FacetResults -@typing.final +@typing_extensions.final class DocumentSearchRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -284,13 +284,6 @@ class DocumentSearchRequest(google.protobuf.message.Message): MIN_SCORE_FIELD_NUMBER: builtins.int id: builtins.str body: builtins.str - page_number: builtins.int - result_per_page: builtins.int - reload: builtins.bool - only_faceted: builtins.bool - with_status: nucliadb_protos.noderesources_pb2.Resource.ResourceStatus.ValueType - advanced_query: builtins.str - min_score: builtins.float @property def fields(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property @@ -299,8 +292,15 @@ class DocumentSearchRequest(google.protobuf.message.Message): def order(self) -> global___OrderBy: ... @property def faceted(self) -> global___Faceted: ... + page_number: builtins.int + result_per_page: builtins.int @property def timestamps(self) -> global___Timestamps: ... 
+ reload: builtins.bool + only_faceted: builtins.bool + with_status: nucliadb_protos.noderesources_pb2.Resource.ResourceStatus.ValueType + advanced_query: builtins.str + min_score: builtins.float def __init__( self, *, @@ -319,16 +319,16 @@ class DocumentSearchRequest(google.protobuf.message.Message): advanced_query: builtins.str | None = ..., min_score: builtins.float = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "timestamps", b"timestamps", "with_status", b"with_status"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "body", b"body", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "id", b"id", "min_score", b"min_score", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "reload", b"reload", "result_per_page", b"result_per_page", "timestamps", b"timestamps", "with_status", b"with_status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "timestamps", b"timestamps", "with_status", b"with_status"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "body", b"body", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "id", b"id", "min_score", b"min_score", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "reload", b"reload", "result_per_page", b"result_per_page", "timestamps", b"timestamps", "with_status", b"with_status"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_advanced_query", b"_advanced_query"]) -> typing.Literal["advanced_query"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_advanced_query", b"_advanced_query"]) -> typing_extensions.Literal["advanced_query"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_with_status", b"_with_status"]) -> typing.Literal["with_status"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_with_status", b"_with_status"]) -> typing_extensions.Literal["with_status"] | None: ... global___DocumentSearchRequest = DocumentSearchRequest -@typing.final +@typing_extensions.final class ParagraphSearchRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -351,17 +351,10 @@ class ParagraphSearchRequest(google.protobuf.message.Message): SECURITY_FIELD_NUMBER: builtins.int id: builtins.str uuid: builtins.str - body: builtins.str - """query this text in all the paragraphs""" - page_number: builtins.int - result_per_page: builtins.int - reload: builtins.bool - with_duplicates: builtins.bool - only_faceted: builtins.bool - advanced_query: builtins.str - min_score: builtins.float @property def fields(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + body: builtins.str + """query this text in all the paragraphs""" @property def filter(self) -> global___Filter: ... 
@property @@ -369,11 +362,17 @@ class ParagraphSearchRequest(google.protobuf.message.Message): @property def faceted(self) -> global___Faceted: """Faceted{ labels: Vec}""" - + page_number: builtins.int + result_per_page: builtins.int @property def timestamps(self) -> global___Timestamps: ... + reload: builtins.bool + with_duplicates: builtins.bool + only_faceted: builtins.bool + advanced_query: builtins.str @property def key_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + min_score: builtins.float @property def security(self) -> nucliadb_protos.utils_pb2.Security: ... def __init__( @@ -397,16 +396,16 @@ class ParagraphSearchRequest(google.protobuf.message.Message): min_score: builtins.float = ..., security: nucliadb_protos.utils_pb2.Security | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "security", b"security", "timestamps", b"timestamps"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "advanced_query", b"advanced_query", "body", b"body", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "id", b"id", "key_filters", b"key_filters", "min_score", b"min_score", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "reload", b"reload", "result_per_page", b"result_per_page", "security", b"security", "timestamps", b"timestamps", "uuid", b"uuid", "with_duplicates", b"with_duplicates"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "security", b"security", "timestamps", b"timestamps"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "advanced_query", b"advanced_query", "body", b"body", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "id", b"id", "key_filters", b"key_filters", "min_score", b"min_score", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "reload", b"reload", "result_per_page", b"result_per_page", "security", b"security", "timestamps", b"timestamps", "uuid", b"uuid", "with_duplicates", b"with_duplicates"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_advanced_query", b"_advanced_query"]) -> typing.Literal["advanced_query"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_advanced_query", b"_advanced_query"]) -> typing_extensions.Literal["advanced_query"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_security", b"_security"]) -> typing.Literal["security"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_security", b"_security"]) -> typing_extensions.Literal["security"] | None: ... global___ParagraphSearchRequest = ParagraphSearchRequest -@typing.final +@typing_extensions.final class ResultScore(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -423,11 +422,11 @@ class ResultScore(google.protobuf.message.Message): bm25: builtins.float = ..., booster: builtins.float = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["bm25", b"bm25", "booster", b"booster"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bm25", b"bm25", "booster", b"booster"]) -> None: ... global___ResultScore = ResultScore -@typing.final +@typing_extensions.final class DocumentResult(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -436,9 +435,9 @@ class DocumentResult(google.protobuf.message.Message): FIELD_FIELD_NUMBER: builtins.int LABELS_FIELD_NUMBER: builtins.int uuid: builtins.str - field: builtins.str @property def score(self) -> global___ResultScore: ... + field: builtins.str @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( @@ -449,16 +448,16 @@ class DocumentResult(google.protobuf.message.Message): field: builtins.str = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["score", b"score"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "labels", b"labels", "score", b"score", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["score", b"score"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "labels", b"labels", "score", b"score", "uuid", b"uuid"]) -> None: ... global___DocumentResult = DocumentResult -@typing.final +@typing_extensions.final class DocumentSearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class FacetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -473,8 +472,8 @@ class DocumentSearchResponse(google.protobuf.message.Message): key: builtins.str = ..., value: global___FacetResults | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... TOTAL_FIELD_NUMBER: builtins.int RESULTS_FIELD_NUMBER: builtins.int @@ -485,6 +484,10 @@ class DocumentSearchResponse(google.protobuf.message.Message): NEXT_PAGE_FIELD_NUMBER: builtins.int BM25_FIELD_NUMBER: builtins.int total: builtins.int + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DocumentResult]: ... + @property + def facets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FacetResults]: ... page_number: builtins.int result_per_page: builtins.int query: builtins.str @@ -492,10 +495,6 @@ class DocumentSearchResponse(google.protobuf.message.Message): next_page: builtins.bool """Is there a next page""" bm25: builtins.bool - @property - def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DocumentResult]: ... - @property - def facets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FacetResults]: ... def __init__( self, *, @@ -508,11 +507,11 @@ class DocumentSearchResponse(google.protobuf.message.Message): next_page: builtins.bool = ..., bm25: builtins.bool = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["bm25", b"bm25", "facets", b"facets", "next_page", b"next_page", "page_number", b"page_number", "query", b"query", "result_per_page", b"result_per_page", "results", b"results", "total", b"total"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bm25", b"bm25", "facets", b"facets", "next_page", b"next_page", "page_number", b"page_number", "query", b"query", "result_per_page", b"result_per_page", "results", b"results", "total", b"total"]) -> None: ... global___DocumentSearchResponse = DocumentSearchResponse -@typing.final +@typing_extensions.final class ParagraphResult(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -541,7 +540,6 @@ class ParagraphResult(google.protobuf.message.Message): @property def metadata(self) -> nucliadb_protos.noderesources_pb2.ParagraphMetadata: """Metadata that can't be searched with but is returned on search results""" - @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( @@ -559,16 +557,16 @@ class ParagraphResult(google.protobuf.message.Message): metadata: nucliadb_protos.noderesources_pb2.ParagraphMetadata | None = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata", "score", b"score"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "field", b"field", "index", b"index", "labels", b"labels", "matches", b"matches", "metadata", b"metadata", "paragraph", b"paragraph", "score", b"score", "split", b"split", "start", b"start", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "score", b"score"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "field", b"field", "index", b"index", "labels", b"labels", "matches", b"matches", "metadata", b"metadata", "paragraph", b"paragraph", "score", b"score", "split", b"split", "start", b"start", "uuid", b"uuid"]) -> None: ... global___ParagraphResult = ParagraphResult -@typing.final +@typing_extensions.final class ParagraphSearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class FacetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -583,8 +581,8 @@ class ParagraphSearchResponse(google.protobuf.message.Message): key: builtins.str = ..., value: global___FacetResults | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
FUZZY_DISTANCE_FIELD_NUMBER: builtins.int TOTAL_FIELD_NUMBER: builtins.int @@ -598,6 +596,12 @@ class ParagraphSearchResponse(google.protobuf.message.Message): EMATCHES_FIELD_NUMBER: builtins.int fuzzy_distance: builtins.int total: builtins.int + @property + def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ParagraphResult]: + """""" + @property + def facets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FacetResults]: + """For each field what facets are.""" page_number: builtins.int """What page is the answer.""" result_per_page: builtins.int @@ -607,14 +611,6 @@ class ParagraphSearchResponse(google.protobuf.message.Message): next_page: builtins.bool """Is there a next page""" bm25: builtins.bool - @property - def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ParagraphResult]: - """""" - - @property - def facets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FacetResults]: - """For each field what facets are.""" - @property def ematches(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( @@ -631,11 +627,11 @@ class ParagraphSearchResponse(google.protobuf.message.Message): bm25: builtins.bool = ..., ematches: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bm25", b"bm25", "ematches", b"ematches", "facets", b"facets", "fuzzy_distance", b"fuzzy_distance", "next_page", b"next_page", "page_number", b"page_number", "query", b"query", "result_per_page", b"result_per_page", "results", b"results", "total", b"total"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bm25", b"bm25", "ematches", b"ematches", "facets", b"facets", "fuzzy_distance", b"fuzzy_distance", "next_page", b"next_page", "page_number", b"page_number", "query", b"query", "result_per_page", b"result_per_page", "results", b"results", "total", b"total"]) -> None: ... global___ParagraphSearchResponse = ParagraphSearchResponse -@typing.final +@typing_extensions.final class VectorSearchRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -652,6 +648,15 @@ class VectorSearchRequest(google.protobuf.message.Message): MIN_SCORE_FIELD_NUMBER: builtins.int id: builtins.str """Shard ID""" + @property + def vector(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: + """Embedded vector search.""" + @property + def field_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """labels to filter""" + @property + def paragraph_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """paragraph labels to filter""" page_number: builtins.int """What page is the answer.""" result_per_page: builtins.int @@ -662,21 +667,9 @@ class VectorSearchRequest(google.protobuf.message.Message): """ID for the vector set. 
Empty for searching on the original index """ - min_score: builtins.float - @property - def vector(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: - """Embedded vector search.""" - - @property - def field_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: - """labels to filter""" - - @property - def paragraph_labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: - """paragraph labels to filter""" - @property def key_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + min_score: builtins.float def __init__( self, *, @@ -692,11 +685,11 @@ class VectorSearchRequest(google.protobuf.message.Message): key_filters: collections.abc.Iterable[builtins.str] | None = ..., min_score: builtins.float = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field_labels", b"field_labels", "id", b"id", "key_filters", b"key_filters", "min_score", b"min_score", "page_number", b"page_number", "paragraph_labels", b"paragraph_labels", "reload", b"reload", "result_per_page", b"result_per_page", "vector", b"vector", "vector_set", b"vector_set", "with_duplicates", b"with_duplicates"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field_labels", b"field_labels", "id", b"id", "key_filters", b"key_filters", "min_score", b"min_score", "page_number", b"page_number", "paragraph_labels", b"paragraph_labels", "reload", b"reload", "result_per_page", b"result_per_page", "vector", b"vector", "vector_set", b"vector_set", "with_duplicates", b"with_duplicates"]) -> None: ... global___VectorSearchRequest = VectorSearchRequest -@typing.final +@typing_extensions.final class DocumentVectorIdentifier(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -707,11 +700,11 @@ class DocumentVectorIdentifier(google.protobuf.message.Message): *, id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id"]) -> None: ... global___DocumentVectorIdentifier = DocumentVectorIdentifier -@typing.final +@typing_extensions.final class DocumentScored(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -719,9 +712,9 @@ class DocumentScored(google.protobuf.message.Message): SCORE_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int LABELS_FIELD_NUMBER: builtins.int - score: builtins.float @property def doc_id(self) -> global___DocumentVectorIdentifier: ... + score: builtins.float @property def metadata(self) -> nucliadb_protos.noderesources_pb2.SentenceMetadata: ... @property @@ -734,26 +727,25 @@ class DocumentScored(google.protobuf.message.Message): metadata: nucliadb_protos.noderesources_pb2.SentenceMetadata | None = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["doc_id", b"doc_id", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["doc_id", b"doc_id", "labels", b"labels", "metadata", b"metadata", "score", b"score"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["doc_id", b"doc_id", "metadata", b"metadata"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["doc_id", b"doc_id", "labels", b"labels", "metadata", b"metadata", "score", b"score"]) -> None: ... global___DocumentScored = DocumentScored -@typing.final +@typing_extensions.final class VectorSearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor DOCUMENTS_FIELD_NUMBER: builtins.int PAGE_NUMBER_FIELD_NUMBER: builtins.int RESULT_PER_PAGE_FIELD_NUMBER: builtins.int + @property + def documents(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DocumentScored]: + """List of docs closer to the asked one.""" page_number: builtins.int """What page is the answer.""" result_per_page: builtins.int """How many results are in this page.""" - @property - def documents(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DocumentScored]: - """List of docs closer to the asked one.""" - def __init__( self, *, @@ -761,11 +753,11 @@ class VectorSearchResponse(google.protobuf.message.Message): page_number: builtins.int = ..., result_per_page: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["documents", b"documents", "page_number", b"page_number", "result_per_page", b"result_per_page"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["documents", b"documents", "page_number", b"page_number", "result_per_page", b"result_per_page"]) -> None: ... global___VectorSearchResponse = VectorSearchResponse -@typing.final +@typing_extensions.final class RelationNodeFilter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -779,13 +771,13 @@ class RelationNodeFilter(google.protobuf.message.Message): node_type: nucliadb_protos.utils_pb2.RelationNode.NodeType.ValueType = ..., node_subtype: builtins.str | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_node_subtype", b"_node_subtype", "node_subtype", b"node_subtype"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_node_subtype", b"_node_subtype", "node_subtype", b"node_subtype", "node_type", b"node_type"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_node_subtype", b"_node_subtype"]) -> typing.Literal["node_subtype"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_node_subtype", b"_node_subtype", "node_subtype", b"node_subtype"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_node_subtype", b"_node_subtype", "node_subtype", b"node_subtype", "node_type", b"node_type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_node_subtype", b"_node_subtype"]) -> typing_extensions.Literal["node_subtype"] | None: ... global___RelationNodeFilter = RelationNodeFilter -@typing.final +@typing_extensions.final class RelationEdgeFilter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -804,13 +796,13 @@ class RelationEdgeFilter(google.protobuf.message.Message): relation_subtype: builtins.str | None = ..., relation_value: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_relation_subtype", b"_relation_subtype", "relation_subtype", b"relation_subtype"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["_relation_subtype", b"_relation_subtype", "relation_subtype", b"relation_subtype", "relation_type", b"relation_type", "relation_value", b"relation_value"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_relation_subtype", b"_relation_subtype"]) -> typing.Literal["relation_subtype"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_relation_subtype", b"_relation_subtype", "relation_subtype", b"relation_subtype"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_relation_subtype", b"_relation_subtype", "relation_subtype", b"relation_subtype", "relation_type", b"relation_type", "relation_value", b"relation_value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_relation_subtype", b"_relation_subtype"]) -> typing_extensions.Literal["relation_subtype"] | None: ... global___RelationEdgeFilter = RelationEdgeFilter -@typing.final +@typing_extensions.final class RelationPrefixSearchRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -825,11 +817,11 @@ class RelationPrefixSearchRequest(google.protobuf.message.Message): prefix: builtins.str = ..., node_filters: collections.abc.Iterable[global___RelationNodeFilter] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["node_filters", b"node_filters", "prefix", b"prefix"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["node_filters", b"node_filters", "prefix", b"prefix"]) -> None: ... global___RelationPrefixSearchRequest = RelationPrefixSearchRequest -@typing.final +@typing_extensions.final class RelationPrefixSearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -841,15 +833,15 @@ class RelationPrefixSearchResponse(google.protobuf.message.Message): *, nodes: collections.abc.Iterable[nucliadb_protos.utils_pb2.RelationNode] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["nodes", b"nodes"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["nodes", b"nodes"]) -> None: ... global___RelationPrefixSearchResponse = RelationPrefixSearchResponse -@typing.final +@typing_extensions.final class EntitiesSubgraphRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class DeletedEntities(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -864,17 +856,16 @@ class EntitiesSubgraphRequest(google.protobuf.message.Message): node_subtype: builtins.str = ..., node_values: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["node_subtype", b"node_subtype", "node_values", b"node_values"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["node_subtype", b"node_subtype", "node_values", b"node_values"]) -> None: ... 
ENTRY_POINTS_FIELD_NUMBER: builtins.int DEPTH_FIELD_NUMBER: builtins.int DELETED_ENTITIES_FIELD_NUMBER: builtins.int DELETED_GROUPS_FIELD_NUMBER: builtins.int - depth: builtins.int @property def entry_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.utils_pb2.RelationNode]: """List of vertices where search will trigger""" - + depth: builtins.int @property def deleted_entities(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EntitiesSubgraphRequest.DeletedEntities]: ... @property @@ -887,13 +878,13 @@ class EntitiesSubgraphRequest(google.protobuf.message.Message): deleted_entities: collections.abc.Iterable[global___EntitiesSubgraphRequest.DeletedEntities] | None = ..., deleted_groups: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_depth", b"_depth", "depth", b"depth"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_depth", b"_depth", "deleted_entities", b"deleted_entities", "deleted_groups", b"deleted_groups", "depth", b"depth", "entry_points", b"entry_points"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_depth", b"_depth"]) -> typing.Literal["depth"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_depth", b"_depth", "depth", b"depth"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_depth", b"_depth", "deleted_entities", b"deleted_entities", "deleted_groups", b"deleted_groups", "depth", b"depth", "entry_points", b"entry_points"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_depth", b"_depth"]) -> typing_extensions.Literal["depth"] | None: ... global___EntitiesSubgraphRequest = EntitiesSubgraphRequest -@typing.final +@typing_extensions.final class EntitiesSubgraphResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -905,11 +896,11 @@ class EntitiesSubgraphResponse(google.protobuf.message.Message): *, relations: collections.abc.Iterable[nucliadb_protos.utils_pb2.Relation] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["relations", b"relations"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["relations", b"relations"]) -> None: ... global___EntitiesSubgraphResponse = EntitiesSubgraphResponse -@typing.final +@typing_extensions.final class RelationSearchRequest(google.protobuf.message.Message): """Query relation index to obtain different information about the knowledge graph. It can be queried using the following strategies: @@ -939,12 +930,12 @@ class RelationSearchRequest(google.protobuf.message.Message): prefix: global___RelationPrefixSearchRequest | None = ..., subgraph: global___EntitiesSubgraphRequest | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["prefix", b"prefix", "reload", b"reload", "shard_id", b"shard_id", "subgraph", b"subgraph"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["prefix", b"prefix", "reload", b"reload", "shard_id", b"shard_id", "subgraph", b"subgraph"]) -> None: ... 
global___RelationSearchRequest = RelationSearchRequest -@typing.final +@typing_extensions.final class RelationSearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -960,12 +951,12 @@ class RelationSearchResponse(google.protobuf.message.Message): prefix: global___RelationPrefixSearchResponse | None = ..., subgraph: global___EntitiesSubgraphResponse | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["prefix", b"prefix", "subgraph", b"subgraph"]) -> None: ... global___RelationSearchResponse = RelationSearchResponse -@typing.final +@typing_extensions.final class SearchRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -995,46 +986,44 @@ class SearchRequest(google.protobuf.message.Message): MIN_SCORE_BM25_FIELD_NUMBER: builtins.int SECURITY_FIELD_NUMBER: builtins.int shard: builtins.str - body: builtins.str - """query this text in all the paragraphs""" - page_number: builtins.int - result_per_page: builtins.int - vectorset: builtins.str - reload: builtins.bool - paragraph: builtins.bool - document: builtins.bool - with_duplicates: builtins.bool - only_faceted: builtins.bool - advanced_query: builtins.str - with_status: nucliadb_protos.noderesources_pb2.Resource.ResourceStatus.ValueType - min_score_semantic: builtins.float - min_score_bm25: builtins.float @property def fields(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + body: builtins.str + """query this text in all the paragraphs""" @property def filter(self) -> global___Filter: ... @property def order(self) -> global___OrderBy: ... @property def faceted(self) -> global___Faceted: ... + page_number: builtins.int + result_per_page: builtins.int @property def timestamps(self) -> global___Timestamps: ... @property def vector(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: """Embedded vector search.""" - + vectorset: builtins.str + reload: builtins.bool + paragraph: builtins.bool + document: builtins.bool + with_duplicates: builtins.bool + only_faceted: builtins.bool + advanced_query: builtins.str + with_status: nucliadb_protos.noderesources_pb2.Resource.ResourceStatus.ValueType @property def relations(self) -> global___RelationSearchRequest: """if provided, search metadata for this nodes (nodes at distance one) and get the shortest path between nodes """ - @property def relation_prefix(self) -> global___RelationPrefixSearchRequest: ... @property def relation_subgraph(self) -> global___EntitiesSubgraphRequest: ... @property def key_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + min_score_semantic: builtins.float + min_score_bm25: builtins.float @property def security(self) -> nucliadb_protos.utils_pb2.Security: ... def __init__( @@ -1066,18 +1055,18 @@ class SearchRequest(google.protobuf.message.Message): min_score_bm25: builtins.float = ..., security: nucliadb_protos.utils_pb2.Security | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "relation_prefix", b"relation_prefix", "relation_subgraph", b"relation_subgraph", "relations", b"relations", "security", b"security", "timestamps", b"timestamps", "with_status", b"with_status"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "body", b"body", "document", b"document", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "key_filters", b"key_filters", "min_score_bm25", b"min_score_bm25", "min_score_semantic", b"min_score_semantic", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "paragraph", b"paragraph", "relation_prefix", b"relation_prefix", "relation_subgraph", b"relation_subgraph", "relations", b"relations", "reload", b"reload", "result_per_page", b"result_per_page", "security", b"security", "shard", b"shard", "timestamps", b"timestamps", "vector", b"vector", "vectorset", b"vectorset", "with_duplicates", b"with_duplicates", "with_status", b"with_status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "faceted", b"faceted", "filter", b"filter", "order", b"order", "relation_prefix", b"relation_prefix", "relation_subgraph", b"relation_subgraph", "relations", b"relations", "security", b"security", "timestamps", b"timestamps", "with_status", b"with_status"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_advanced_query", b"_advanced_query", "_security", b"_security", "_with_status", b"_with_status", "advanced_query", b"advanced_query", "body", b"body", "document", b"document", "faceted", b"faceted", "fields", b"fields", "filter", b"filter", "key_filters", b"key_filters", "min_score_bm25", b"min_score_bm25", "min_score_semantic", b"min_score_semantic", "only_faceted", b"only_faceted", "order", b"order", "page_number", b"page_number", "paragraph", b"paragraph", "relation_prefix", b"relation_prefix", "relation_subgraph", b"relation_subgraph", "relations", b"relations", "reload", b"reload", "result_per_page", b"result_per_page", "security", b"security", "shard", b"shard", "timestamps", b"timestamps", "vector", b"vector", "vectorset", b"vectorset", "with_duplicates", b"with_duplicates", "with_status", b"with_status"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_advanced_query", b"_advanced_query"]) -> typing.Literal["advanced_query"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_advanced_query", b"_advanced_query"]) -> typing_extensions.Literal["advanced_query"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_security", b"_security"]) -> typing.Literal["security"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_security", b"_security"]) -> typing_extensions.Literal["security"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_with_status", b"_with_status"]) -> typing.Literal["with_status"] | None: ... 
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["_with_status", b"_with_status"]) -> typing_extensions.Literal["with_status"] | None: ... global___SearchRequest = SearchRequest -@typing.final +@typing_extensions.final class SuggestRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1111,31 +1100,31 @@ class SuggestRequest(google.protobuf.message.Message): fields: collections.abc.Iterable[builtins.str] | None = ..., key_filters: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["filter", b"filter", "timestamps", b"timestamps"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["body", b"body", "features", b"features", "fields", b"fields", "filter", b"filter", "key_filters", b"key_filters", "shard", b"shard", "timestamps", b"timestamps"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["filter", b"filter", "timestamps", b"timestamps"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "features", b"features", "fields", b"fields", "filter", b"filter", "key_filters", b"key_filters", "shard", b"shard", "timestamps", b"timestamps"]) -> None: ... global___SuggestRequest = SuggestRequest -@typing.final +@typing_extensions.final class RelatedEntities(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor ENTITIES_FIELD_NUMBER: builtins.int TOTAL_FIELD_NUMBER: builtins.int - total: builtins.int @property def entities(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + total: builtins.int def __init__( self, *, entities: collections.abc.Iterable[builtins.str] | None = ..., total: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "total", b"total"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "total", b"total"]) -> None: ... global___RelatedEntities = RelatedEntities -@typing.final +@typing_extensions.final class SuggestResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1145,16 +1134,15 @@ class SuggestResponse(google.protobuf.message.Message): EMATCHES_FIELD_NUMBER: builtins.int ENTITY_RESULTS_FIELD_NUMBER: builtins.int total: builtins.int - query: builtins.str - """The text that lead to this results""" @property def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ParagraphResult]: ... + query: builtins.str + """The text that lead to this results""" @property def ematches(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def entity_results(self) -> global___RelationPrefixSearchResponse: """Entities related with the query""" - def __init__( self, *, @@ -1164,12 +1152,12 @@ class SuggestResponse(google.protobuf.message.Message): ematches: collections.abc.Iterable[builtins.str] | None = ..., entity_results: global___RelationPrefixSearchResponse | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["entity_results", b"entity_results"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["ematches", b"ematches", "entity_results", b"entity_results", "query", b"query", "results", b"results", "total", b"total"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["entity_results", b"entity_results"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["ematches", b"ematches", "entity_results", b"entity_results", "query", b"query", "results", b"results", "total", b"total"]) -> None: ... global___SuggestResponse = SuggestResponse -@typing.final +@typing_extensions.final class SearchResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1193,12 +1181,12 @@ class SearchResponse(google.protobuf.message.Message): vector: global___VectorSearchResponse | None = ..., relation: global___RelationSearchResponse | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["document", b"document", "paragraph", b"paragraph", "relation", b"relation", "vector", b"vector"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["document", b"document", "paragraph", b"paragraph", "relation", b"relation", "vector", b"vector"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["document", b"document", "paragraph", b"paragraph", "relation", b"relation", "vector", b"vector"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["document", b"document", "paragraph", b"paragraph", "relation", b"relation", "vector", b"vector"]) -> None: ... global___SearchResponse = SearchResponse -@typing.final +@typing_extensions.final class IdCollection(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1210,11 +1198,11 @@ class IdCollection(google.protobuf.message.Message): *, ids: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ids", b"ids"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ids", b"ids"]) -> None: ... global___IdCollection = IdCollection -@typing.final +@typing_extensions.final class RelationEdge(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1228,11 +1216,11 @@ class RelationEdge(google.protobuf.message.Message): edge_type: nucliadb_protos.utils_pb2.Relation.RelationType.ValueType = ..., property: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["edge_type", b"edge_type", "property", b"property"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["edge_type", b"edge_type", "property", b"property"]) -> None: ... global___RelationEdge = RelationEdge -@typing.final +@typing_extensions.final class EdgeList(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1244,31 +1232,31 @@ class EdgeList(google.protobuf.message.Message): *, list: collections.abc.Iterable[global___RelationEdge] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["list", b"list"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["list", b"list"]) -> None: ... global___EdgeList = EdgeList -@typing.final +@typing_extensions.final class GetShardRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor SHARD_ID_FIELD_NUMBER: builtins.int VECTORSET_FIELD_NUMBER: builtins.int - vectorset: builtins.str @property def shard_id(self) -> nucliadb_protos.noderesources_pb2.ShardId: ... + vectorset: builtins.str def __init__( self, *, shard_id: nucliadb_protos.noderesources_pb2.ShardId | None = ..., vectorset: builtins.str = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["shard_id", b"shard_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["shard_id", b"shard_id", "vectorset", b"vectorset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["shard_id", b"shard_id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["shard_id", b"shard_id", "vectorset", b"vectorset"]) -> None: ... global___GetShardRequest = GetShardRequest -@typing.final +@typing_extensions.final class ParagraphItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1283,11 +1271,11 @@ class ParagraphItem(google.protobuf.message.Message): id: builtins.str = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "labels", b"labels"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "labels", b"labels"]) -> None: ... global___ParagraphItem = ParagraphItem -@typing.final +@typing_extensions.final class DocumentItem(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1305,11 +1293,11 @@ class DocumentItem(google.protobuf.message.Message): field: builtins.str = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "labels", b"labels", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "labels", b"labels", "uuid", b"uuid"]) -> None: ... global___DocumentItem = DocumentItem -@typing.final +@typing_extensions.final class StreamRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1317,9 +1305,9 @@ class StreamRequest(google.protobuf.message.Message): RELOAD_FIELD_NUMBER: builtins.int SHARD_ID_FIELD_NUMBER: builtins.int FILTER_FIELD_NUMBER: builtins.int - reload: builtins.bool @property def filter__deprecated(self) -> global___Filter: ... + reload: builtins.bool @property def shard_id(self) -> nucliadb_protos.noderesources_pb2.ShardId: ... @property @@ -1332,12 +1320,12 @@ class StreamRequest(google.protobuf.message.Message): shard_id: nucliadb_protos.noderesources_pb2.ShardId | None = ..., filter: global___StreamFilter | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["filter", b"filter", "filter__deprecated", b"filter__deprecated", "shard_id", b"shard_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["filter", b"filter", "filter__deprecated", b"filter__deprecated", "reload", b"reload", "shard_id", b"shard_id"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["filter", b"filter", "filter__deprecated", b"filter__deprecated", "shard_id", b"shard_id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["filter", b"filter", "filter__deprecated", b"filter__deprecated", "reload", b"reload", "shard_id", b"shard_id"]) -> None: ... global___StreamRequest = StreamRequest -@typing.final +@typing_extensions.final class GetShardFilesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1348,11 +1336,11 @@ class GetShardFilesRequest(google.protobuf.message.Message): *, shard_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["shard_id", b"shard_id"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["shard_id", b"shard_id"]) -> None: ... global___GetShardFilesRequest = GetShardFilesRequest -@typing.final +@typing_extensions.final class ShardFileList(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1364,11 +1352,11 @@ class ShardFileList(google.protobuf.message.Message): *, files: collections.abc.Iterable[global___ShardFile] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["files", b"files"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["files", b"files"]) -> None: ... global___ShardFileList = ShardFileList -@typing.final +@typing_extensions.final class ShardFile(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1382,11 +1370,11 @@ class ShardFile(google.protobuf.message.Message): relative_path: builtins.str = ..., size: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["relative_path", b"relative_path", "size", b"size"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["relative_path", b"relative_path", "size", b"size"]) -> None: ... global___ShardFile = ShardFile -@typing.final +@typing_extensions.final class DownloadShardFileRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1400,11 +1388,11 @@ class DownloadShardFileRequest(google.protobuf.message.Message): shard_id: builtins.str = ..., relative_path: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["relative_path", b"relative_path", "shard_id", b"shard_id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["relative_path", b"relative_path", "shard_id", b"shard_id"]) -> None: ... global___DownloadShardFileRequest = DownloadShardFileRequest -@typing.final +@typing_extensions.final class ShardFileChunk(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1418,6 +1406,6 @@ class ShardFileChunk(google.protobuf.message.Message): data: builtins.bytes = ..., index: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data", "index", b"index"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "index", b"index"]) -> None: ... global___ShardFileChunk = ShardFileChunk diff --git a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2_grpc.pyi index 4f18f1ae5b..98b214e52d 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodereader_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/nodereader_pb2_grpc.pyi @@ -2,14 +2,11 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import abc import collections.abc import grpc -import grpc.aio import nucliadb_protos.nodereader_pb2 import nucliadb_protos.noderesources_pb2 -import typing from nucliadb_protos.noderesources_pb2 import ( EmptyQuery as EmptyQuery, EmptyResponse as EmptyResponse, @@ -32,6 +29,7 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) from nucliadb_protos.utils_pb2 import ( COSINE as COSINE, @@ -54,184 +52,77 @@ from nucliadb_protos.utils_pb2 import ( Vectors as Vectors, ) -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... - class NodeReaderStub: """Implemented at nucliadb_object_storage""" - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... GetShard: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.GetShardRequest, nucliadb_protos.noderesources_pb2.Shard, ] - DocumentSearch: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.DocumentSearchRequest, nucliadb_protos.nodereader_pb2.DocumentSearchResponse, ] - ParagraphSearch: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.ParagraphSearchRequest, nucliadb_protos.nodereader_pb2.ParagraphSearchResponse, ] - VectorSearch: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.VectorSearchRequest, nucliadb_protos.nodereader_pb2.VectorSearchResponse, ] - RelationSearch: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.RelationSearchRequest, nucliadb_protos.nodereader_pb2.RelationSearchResponse, ] - DocumentIds: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodereader_pb2.IdCollection, ] - ParagraphIds: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodereader_pb2.IdCollection, ] - VectorIds: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodereader_pb2.IdCollection, ] - RelationIds: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodereader_pb2.IdCollection, ] - RelationEdges: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodereader_pb2.EdgeList, ] - Search: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.SearchRequest, nucliadb_protos.nodereader_pb2.SearchResponse, ] - Suggest: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.SuggestRequest, nucliadb_protos.nodereader_pb2.SuggestResponse, ] - Paragraphs: grpc.UnaryStreamMultiCallable[ nucliadb_protos.nodereader_pb2.StreamRequest, nucliadb_protos.nodereader_pb2.ParagraphItem, ] """Streams""" - Documents: grpc.UnaryStreamMultiCallable[ nucliadb_protos.nodereader_pb2.StreamRequest, nucliadb_protos.nodereader_pb2.DocumentItem, ] - GetShardFiles: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodereader_pb2.GetShardFilesRequest, nucliadb_protos.nodereader_pb2.ShardFileList, ] """Shard Download""" - DownloadShardFile: grpc.UnaryStreamMultiCallable[ nucliadb_protos.nodereader_pb2.DownloadShardFileRequest, nucliadb_protos.nodereader_pb2.ShardFileChunk, ] -class NodeReaderAsyncStub: - """Implemented at nucliadb_object_storage""" - - GetShard: 
grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.GetShardRequest, - nucliadb_protos.noderesources_pb2.Shard, - ] - - DocumentSearch: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.DocumentSearchRequest, - nucliadb_protos.nodereader_pb2.DocumentSearchResponse, - ] - - ParagraphSearch: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.ParagraphSearchRequest, - nucliadb_protos.nodereader_pb2.ParagraphSearchResponse, - ] - - VectorSearch: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.VectorSearchRequest, - nucliadb_protos.nodereader_pb2.VectorSearchResponse, - ] - - RelationSearch: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.RelationSearchRequest, - nucliadb_protos.nodereader_pb2.RelationSearchResponse, - ] - - DocumentIds: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodereader_pb2.IdCollection, - ] - - ParagraphIds: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodereader_pb2.IdCollection, - ] - - VectorIds: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodereader_pb2.IdCollection, - ] - - RelationIds: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodereader_pb2.IdCollection, - ] - - RelationEdges: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodereader_pb2.EdgeList, - ] - - Search: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.SearchRequest, - nucliadb_protos.nodereader_pb2.SearchResponse, - ] - - Suggest: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.SuggestRequest, - nucliadb_protos.nodereader_pb2.SuggestResponse, - ] - - Paragraphs: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.nodereader_pb2.StreamRequest, - nucliadb_protos.nodereader_pb2.ParagraphItem, - ] - """Streams""" - - Documents: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.nodereader_pb2.StreamRequest, - nucliadb_protos.nodereader_pb2.DocumentItem, - ] - - GetShardFiles: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodereader_pb2.GetShardFilesRequest, - nucliadb_protos.nodereader_pb2.ShardFileList, - ] - """Shard Download""" - - DownloadShardFile: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.nodereader_pb2.DownloadShardFileRequest, - nucliadb_protos.nodereader_pb2.ShardFileChunk, - ] - class NodeReaderServicer(metaclass=abc.ABCMeta): """Implemented at nucliadb_object_storage""" @@ -239,114 +130,99 @@ class NodeReaderServicer(metaclass=abc.ABCMeta): def GetShard( self, request: nucliadb_protos.nodereader_pb2.GetShardRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.Shard, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.Shard]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.Shard: ... @abc.abstractmethod def DocumentSearch( self, request: nucliadb_protos.nodereader_pb2.DocumentSearchRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.DocumentSearchResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.DocumentSearchResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.DocumentSearchResponse: ... 
@abc.abstractmethod def ParagraphSearch( self, request: nucliadb_protos.nodereader_pb2.ParagraphSearchRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.ParagraphSearchResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.ParagraphSearchResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.ParagraphSearchResponse: ... @abc.abstractmethod def VectorSearch( self, request: nucliadb_protos.nodereader_pb2.VectorSearchRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.VectorSearchResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.VectorSearchResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.VectorSearchResponse: ... @abc.abstractmethod def RelationSearch( self, request: nucliadb_protos.nodereader_pb2.RelationSearchRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.RelationSearchResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.RelationSearchResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.RelationSearchResponse: ... @abc.abstractmethod def DocumentIds( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.IdCollection, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.IdCollection]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.IdCollection: ... @abc.abstractmethod def ParagraphIds( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.IdCollection, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.IdCollection]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.IdCollection: ... @abc.abstractmethod def VectorIds( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.IdCollection, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.IdCollection]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.IdCollection: ... @abc.abstractmethod def RelationIds( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.IdCollection, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.IdCollection]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.IdCollection: ... @abc.abstractmethod def RelationEdges( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.EdgeList, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.EdgeList]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.EdgeList: ... @abc.abstractmethod def Search( self, request: nucliadb_protos.nodereader_pb2.SearchRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.SearchResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.SearchResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.SearchResponse: ... 
@abc.abstractmethod def Suggest( self, request: nucliadb_protos.nodereader_pb2.SuggestRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.SuggestResponse, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.SuggestResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.SuggestResponse: ... @abc.abstractmethod def Paragraphs( self, request: nucliadb_protos.nodereader_pb2.StreamRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.nodereader_pb2.ParagraphItem], collections.abc.AsyncIterator[nucliadb_protos.nodereader_pb2.ParagraphItem]]: + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.nodereader_pb2.ParagraphItem]: """Streams""" - @abc.abstractmethod def Documents( self, request: nucliadb_protos.nodereader_pb2.StreamRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.nodereader_pb2.DocumentItem], collections.abc.AsyncIterator[nucliadb_protos.nodereader_pb2.DocumentItem]]: ... - + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.nodereader_pb2.DocumentItem]: ... @abc.abstractmethod def GetShardFiles( self, request: nucliadb_protos.nodereader_pb2.GetShardFilesRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodereader_pb2.ShardFileList, collections.abc.Awaitable[nucliadb_protos.nodereader_pb2.ShardFileList]]: + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodereader_pb2.ShardFileList: """Shard Download""" - @abc.abstractmethod def DownloadShardFile( self, request: nucliadb_protos.nodereader_pb2.DownloadShardFileRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.nodereader_pb2.ShardFileChunk], collections.abc.AsyncIterator[nucliadb_protos.nodereader_pb2.ShardFileChunk]]: ... + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.nodereader_pb2.ShardFileChunk]: ... -def add_NodeReaderServicer_to_server(servicer: NodeReaderServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... +def add_NodeReaderServicer_to_server(servicer: NodeReaderServicer, server: grpc.Server) -> None: ... diff --git a/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.py b/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.py index 3f74501cea..db08365875 100644 --- a/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: nucliadb_protos/noderesources.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -17,15 +17,19 @@ from nucliadb_protos.utils_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#nucliadb_protos/noderesources.proto\x12\rnoderesources\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bnucliadb_protos/utils.proto\"/\n\x0fTextInformation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x0e\n\x06labels\x18\x02 \x03(\t\"j\n\rIndexMetadata\x12,\n\x08modified\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07\x63reated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x15\n\x07ShardId\x12\n\n\x02id\x18\x01 \x01(\t\"/\n\x08ShardIds\x12#\n\x03ids\x18\x01 \x03(\x0b\x32\x16.noderesources.ShardId\"\xcb\x04\n\x0cShardCreated\x12\n\n\x02id\x18\x01 \x01(\t\x12\x45\n\x10\x64ocument_service\x18\x02 \x01(\x0e\x32+.noderesources.ShardCreated.DocumentService\x12G\n\x11paragraph_service\x18\x03 \x01(\x0e\x32,.noderesources.ShardCreated.ParagraphService\x12\x41\n\x0evector_service\x18\x04 \x01(\x0e\x32).noderesources.ShardCreated.VectorService\x12\x45\n\x10relation_service\x18\x05 \x01(\x0e\x32+.noderesources.ShardCreated.RelationService\"D\n\x0f\x44ocumentService\x12\x0f\n\x0b\x44OCUMENT_V0\x10\x00\x12\x0f\n\x0b\x44OCUMENT_V1\x10\x01\x12\x0f\n\x0b\x44OCUMENT_V2\x10\x02\"Z\n\x10ParagraphService\x12\x10\n\x0cPARAGRAPH_V0\x10\x00\x12\x10\n\x0cPARAGRAPH_V1\x10\x01\x12\x10\n\x0cPARAGRAPH_V2\x10\x02\x12\x10\n\x0cPARAGRAPH_V3\x10\x03\"-\n\rVectorService\x12\r\n\tVECTOR_V0\x10\x00\x12\r\n\tVECTOR_V1\x10\x01\"D\n\x0fRelationService\x12\x0f\n\x0bRELATION_V0\x10\x00\x12\x0f\n\x0bRELATION_V1\x10\x01\x12\x0f\n\x0bRELATION_V2\x10\x02\",\n\nResourceID\x12\x10\n\x08shard_id\x18\x01 \x01(\t\x12\x0c\n\x04uuid\x18\x02 \x01(\t\"\x80\x01\n\x05Shard\x12.\n\x08metadata\x18\x05 \x01(\x0b\x32\x1c.noderesources.ShardMetadata\x12\x10\n\x08shard_id\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x01(\x04\x12\x12\n\nparagraphs\x18\x03 \x01(\x04\x12\x11\n\tsentences\x18\x04 \x01(\x04\"\x0f\n\rEmptyResponse\"\x0c\n\nEmptyQuery\"\x87\x01\n\x08Position\x12\r\n\x05index\x18\x01 \x01(\x04\x12\r\n\x05start\x18\x02 \x01(\x04\x12\x0b\n\x03\x65nd\x18\x03 \x01(\x04\x12\x13\n\x0bpage_number\x18\x04 \x01(\x04\x12\x0f\n\x07in_page\x18\x07 \x01(\x08\x12\x15\n\rstart_seconds\x18\x05 \x03(\r\x12\x13\n\x0b\x65nd_seconds\x18\x06 \x03(\r\"2\n\x0eRepresentation\x12\x12\n\nis_a_table\x18\x01 \x01(\x08\x12\x0c\n\x04\x66ile\x18\x02 \x01(\t\"\x8e\x01\n\x10SentenceMetadata\x12)\n\x08position\x18\x01 \x01(\x0b\x32\x17.noderesources.Position\x12\x18\n\x10page_with_visual\x18\x02 \x01(\x08\x12\x35\n\x0erepresentation\x18\x03 \x01(\x0b\x32\x1d.noderesources.Representation\"S\n\x0eVectorSentence\x12\x0e\n\x06vector\x18\x01 \x03(\x02\x12\x31\n\x08metadata\x18\t \x01(\x0b\x32\x1f.noderesources.SentenceMetadata\"\x8f\x01\n\x11ParagraphMetadata\x12)\n\x08position\x18\x01 \x01(\x0b\x32\x17.noderesources.Position\x12\x18\n\x10page_with_visual\x18\x02 \x01(\x08\x12\x35\n\x0erepresentation\x18\x03 \x01(\x0b\x32\x1d.noderesources.Representation\"\xca\x02\n\x0eIndexParagraph\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x0e\n\x06labels\x18\x03 \x03(\t\x12?\n\tsentences\x18\x04 \x03(\x0b\x32,.noderesources.IndexParagraph.SentencesEntry\x12\r\n\x05\x66ield\x18\x05 \x01(\t\x12\r\n\x05split\x18\x06 \x01(\t\x12\r\n\x05index\x18\x07 
\x01(\x04\x12\x19\n\x11repeated_in_field\x18\x08 \x01(\x08\x12\x32\n\x08metadata\x18\t \x01(\x0b\x32 .noderesources.ParagraphMetadata\x1aO\n\x0eSentencesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.noderesources.VectorSentence:\x02\x38\x01\"G\n\x0bVectorSetID\x12%\n\x05shard\x18\x01 \x01(\x0b\x32\x16.noderesources.ShardId\x12\x11\n\tvectorset\x18\x02 \x01(\t\"I\n\rVectorSetList\x12%\n\x05shard\x18\x01 \x01(\x0b\x32\x16.noderesources.ShardId\x12\x11\n\tvectorset\x18\x02 \x03(\t\"\xa7\x01\n\x0fIndexParagraphs\x12\x42\n\nparagraphs\x18\x01 \x03(\x0b\x32..noderesources.IndexParagraphs.ParagraphsEntry\x1aP\n\x0fParagraphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.noderesources.IndexParagraph:\x02\x38\x01\"\xe4\x07\n\x08Resource\x12+\n\x08resource\x18\x01 \x01(\x0b\x32\x19.noderesources.ResourceID\x12.\n\x08metadata\x18\x02 \x01(\x0b\x32\x1c.noderesources.IndexMetadata\x12\x31\n\x05texts\x18\x03 \x03(\x0b\x32\".noderesources.Resource.TextsEntry\x12\x0e\n\x06labels\x18\x04 \x03(\t\x12\x36\n\x06status\x18\x05 \x01(\x0e\x32&.noderesources.Resource.ResourceStatus\x12;\n\nparagraphs\x18\x06 \x03(\x0b\x32\'.noderesources.Resource.ParagraphsEntry\x12\x1c\n\x14paragraphs_to_delete\x18\x07 \x03(\t\x12\x1b\n\x13sentences_to_delete\x18\x08 \x03(\t\x12\"\n\trelations\x18\t \x03(\x0b\x32\x0f.utils.Relation\x12\x10\n\x08shard_id\x18\x0b \x01(\t\x12\x35\n\x07vectors\x18\x0c \x03(\x0b\x32$.noderesources.Resource.VectorsEntry\x12G\n\x11vectors_to_delete\x18\r \x03(\x0b\x32,.noderesources.Resource.VectorsToDeleteEntry\x12&\n\x08security\x18\x0e \x01(\x0b\x32\x0f.utils.SecurityH\x00\x88\x01\x01\x1aL\n\nTextsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12-\n\x05value\x18\x02 \x01(\x0b\x32\x1e.noderesources.TextInformation:\x02\x38\x01\x1aQ\n\x0fParagraphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12-\n\x05value\x18\x02 \x01(\x0b\x32\x1e.noderesources.IndexParagraphs:\x02\x38\x01\x1a\x42\n\x0cVectorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.utils.UserVectors:\x02\x38\x01\x1aN\n\x14VectorsToDeleteEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.utils.UserVectorsList:\x02\x38\x01\"h\n\x0eResourceStatus\x12\r\n\tPROCESSED\x10\x00\x12\t\n\x05\x45MPTY\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\x0b\n\x07PENDING\x10\x04\x12\x0b\n\x07\x42LOCKED\x10\x05\x12\x0b\n\x07\x45XPIRED\x10\x06\x42\x0b\n\t_security\"M\n\rShardMetadata\x12\x0c\n\x04kbid\x18\x01 \x01(\t\x12.\n\x0frelease_channel\x18\x02 \x01(\x0e\x32\x15.utils.ReleaseChannel\"\xf8\x02\n\x0cNodeMetadata\x12\x16\n\nload_score\x18\x01 \x01(\x02\x42\x02\x18\x01\x12\x13\n\x0bshard_count\x18\x02 \x01(\x04\x12;\n\x06shards\x18\x03 \x03(\x0b\x32\'.noderesources.NodeMetadata.ShardsEntryB\x02\x18\x01\x12\x0f\n\x07node_id\x18\x04 \x01(\t\x12\x1c\n\x0fprimary_node_id\x18\x05 \x01(\tH\x00\x88\x01\x01\x12\x16\n\x0e\x61vailable_disk\x18\x06 \x01(\x04\x12\x12\n\ntotal_disk\x18\x07 \x01(\x04\x1a\x35\n\rShardMetadata\x12\x0c\n\x04kbid\x18\x01 \x01(\t\x12\x16\n\nload_score\x18\x02 \x01(\x02\x42\x02\x18\x01\x1aX\n\x0bShardsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).noderesources.NodeMetadata.ShardMetadata:\x02\x38\x01\x42\x12\n\x10_primary_node_idP\x01\x62\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n#nucliadb_protos/noderesources.proto\x12\rnoderesources\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1bnucliadb_protos/utils.proto\"/\n\x0fTextInformation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x0e\n\x06labels\x18\x02 \x03(\t\"j\n\rIndexMetadata\x12,\n\x08modified\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x07\x63reated\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x15\n\x07ShardId\x12\n\n\x02id\x18\x01 \x01(\t\"/\n\x08ShardIds\x12#\n\x03ids\x18\x01 \x03(\x0b\x32\x16.noderesources.ShardId\"\xcb\x04\n\x0cShardCreated\x12\n\n\x02id\x18\x01 \x01(\t\x12\x45\n\x10\x64ocument_service\x18\x02 \x01(\x0e\x32+.noderesources.ShardCreated.DocumentService\x12G\n\x11paragraph_service\x18\x03 \x01(\x0e\x32,.noderesources.ShardCreated.ParagraphService\x12\x41\n\x0evector_service\x18\x04 \x01(\x0e\x32).noderesources.ShardCreated.VectorService\x12\x45\n\x10relation_service\x18\x05 \x01(\x0e\x32+.noderesources.ShardCreated.RelationService\"D\n\x0f\x44ocumentService\x12\x0f\n\x0b\x44OCUMENT_V0\x10\x00\x12\x0f\n\x0b\x44OCUMENT_V1\x10\x01\x12\x0f\n\x0b\x44OCUMENT_V2\x10\x02\"Z\n\x10ParagraphService\x12\x10\n\x0cPARAGRAPH_V0\x10\x00\x12\x10\n\x0cPARAGRAPH_V1\x10\x01\x12\x10\n\x0cPARAGRAPH_V2\x10\x02\x12\x10\n\x0cPARAGRAPH_V3\x10\x03\"-\n\rVectorService\x12\r\n\tVECTOR_V0\x10\x00\x12\r\n\tVECTOR_V1\x10\x01\"D\n\x0fRelationService\x12\x0f\n\x0bRELATION_V0\x10\x00\x12\x0f\n\x0bRELATION_V1\x10\x01\x12\x0f\n\x0bRELATION_V2\x10\x02\",\n\nResourceID\x12\x10\n\x08shard_id\x18\x01 \x01(\t\x12\x0c\n\x04uuid\x18\x02 \x01(\t\"\x80\x01\n\x05Shard\x12.\n\x08metadata\x18\x05 \x01(\x0b\x32\x1c.noderesources.ShardMetadata\x12\x10\n\x08shard_id\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x01(\x04\x12\x12\n\nparagraphs\x18\x03 \x01(\x04\x12\x11\n\tsentences\x18\x04 \x01(\x04\"\x0f\n\rEmptyResponse\"\x0c\n\nEmptyQuery\"\x87\x01\n\x08Position\x12\r\n\x05index\x18\x01 \x01(\x04\x12\r\n\x05start\x18\x02 \x01(\x04\x12\x0b\n\x03\x65nd\x18\x03 \x01(\x04\x12\x13\n\x0bpage_number\x18\x04 \x01(\x04\x12\x0f\n\x07in_page\x18\x07 \x01(\x08\x12\x15\n\rstart_seconds\x18\x05 \x03(\r\x12\x13\n\x0b\x65nd_seconds\x18\x06 \x03(\r\"2\n\x0eRepresentation\x12\x12\n\nis_a_table\x18\x01 \x01(\x08\x12\x0c\n\x04\x66ile\x18\x02 \x01(\t\"\x8e\x01\n\x10SentenceMetadata\x12)\n\x08position\x18\x01 \x01(\x0b\x32\x17.noderesources.Position\x12\x18\n\x10page_with_visual\x18\x02 \x01(\x08\x12\x35\n\x0erepresentation\x18\x03 \x01(\x0b\x32\x1d.noderesources.Representation\"S\n\x0eVectorSentence\x12\x0e\n\x06vector\x18\x01 \x03(\x02\x12\x31\n\x08metadata\x18\t \x01(\x0b\x32\x1f.noderesources.SentenceMetadata\"\xaa\x01\n\x12VectorsetSentences\x12\x43\n\tsentences\x18\x01 \x03(\x0b\x32\x30.noderesources.VectorsetSentences.SentencesEntry\x1aO\n\x0eSentencesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.noderesources.VectorSentence:\x02\x38\x01\"\x8f\x01\n\x11ParagraphMetadata\x12)\n\x08position\x18\x01 \x01(\x0b\x32\x17.noderesources.Position\x12\x18\n\x10page_with_visual\x18\x02 \x01(\x08\x12\x35\n\x0erepresentation\x18\x03 \x01(\x0b\x32\x1d.noderesources.Representation\"\xff\x03\n\x0eIndexParagraph\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x0e\n\x06labels\x18\x03 \x03(\t\x12?\n\tsentences\x18\x04 \x03(\x0b\x32,.noderesources.IndexParagraph.SentencesEntry\x12T\n\x14vectorsets_sentences\x18\n \x03(\x0b\x32\x36.noderesources.IndexParagraph.VectorsetsSentencesEntry\x12\r\n\x05\x66ield\x18\x05 \x01(\t\x12\r\n\x05split\x18\x06 
\x01(\t\x12\r\n\x05index\x18\x07 \x01(\x04\x12\x19\n\x11repeated_in_field\x18\x08 \x01(\x08\x12\x32\n\x08metadata\x18\t \x01(\x0b\x32 .noderesources.ParagraphMetadata\x1aO\n\x0eSentencesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.noderesources.VectorSentence:\x02\x38\x01\x1a]\n\x18VectorsetsSentencesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.noderesources.VectorsetSentences:\x02\x38\x01\"G\n\x0bVectorSetID\x12%\n\x05shard\x18\x01 \x01(\x0b\x32\x16.noderesources.ShardId\x12\x11\n\tvectorset\x18\x02 \x01(\t\"J\n\rVectorSetList\x12%\n\x05shard\x18\x01 \x01(\x0b\x32\x16.noderesources.ShardId\x12\x12\n\nvectorsets\x18\x02 \x03(\t\"\xa7\x01\n\x0fIndexParagraphs\x12\x42\n\nparagraphs\x18\x01 \x03(\x0b\x32..noderesources.IndexParagraphs.ParagraphsEntry\x1aP\n\x0fParagraphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.noderesources.IndexParagraph:\x02\x38\x01\"\xec\x07\n\x08Resource\x12+\n\x08resource\x18\x01 \x01(\x0b\x32\x19.noderesources.ResourceID\x12.\n\x08metadata\x18\x02 \x01(\x0b\x32\x1c.noderesources.IndexMetadata\x12\x31\n\x05texts\x18\x03 \x03(\x0b\x32\".noderesources.Resource.TextsEntry\x12\x0e\n\x06labels\x18\x04 \x03(\t\x12\x36\n\x06status\x18\x05 \x01(\x0e\x32&.noderesources.Resource.ResourceStatus\x12;\n\nparagraphs\x18\x06 \x03(\x0b\x32\'.noderesources.Resource.ParagraphsEntry\x12\x1c\n\x14paragraphs_to_delete\x18\x07 \x03(\t\x12\x1b\n\x13sentences_to_delete\x18\x08 \x03(\t\x12\"\n\trelations\x18\t \x03(\x0b\x32\x0f.utils.Relation\x12\x10\n\x08shard_id\x18\x0b \x01(\t\x12\x39\n\x07vectors\x18\x0c \x03(\x0b\x32$.noderesources.Resource.VectorsEntryB\x02\x18\x01\x12K\n\x11vectors_to_delete\x18\r \x03(\x0b\x32,.noderesources.Resource.VectorsToDeleteEntryB\x02\x18\x01\x12&\n\x08security\x18\x0e \x01(\x0b\x32\x0f.utils.SecurityH\x00\x88\x01\x01\x1aL\n\nTextsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12-\n\x05value\x18\x02 \x01(\x0b\x32\x1e.noderesources.TextInformation:\x02\x38\x01\x1aQ\n\x0fParagraphsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12-\n\x05value\x18\x02 \x01(\x0b\x32\x1e.noderesources.IndexParagraphs:\x02\x38\x01\x1a\x42\n\x0cVectorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.utils.UserVectors:\x02\x38\x01\x1aN\n\x14VectorsToDeleteEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.utils.UserVectorsList:\x02\x38\x01\"h\n\x0eResourceStatus\x12\r\n\tPROCESSED\x10\x00\x12\t\n\x05\x45MPTY\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03\x12\x0b\n\x07PENDING\x10\x04\x12\x0b\n\x07\x42LOCKED\x10\x05\x12\x0b\n\x07\x45XPIRED\x10\x06\x42\x0b\n\t_security\"M\n\rShardMetadata\x12\x0c\n\x04kbid\x18\x01 \x01(\t\x12.\n\x0frelease_channel\x18\x02 \x01(\x0e\x32\x15.utils.ReleaseChannel\"\xf8\x02\n\x0cNodeMetadata\x12\x16\n\nload_score\x18\x01 \x01(\x02\x42\x02\x18\x01\x12\x13\n\x0bshard_count\x18\x02 \x01(\x04\x12;\n\x06shards\x18\x03 \x03(\x0b\x32\'.noderesources.NodeMetadata.ShardsEntryB\x02\x18\x01\x12\x0f\n\x07node_id\x18\x04 \x01(\t\x12\x1c\n\x0fprimary_node_id\x18\x05 \x01(\tH\x00\x88\x01\x01\x12\x16\n\x0e\x61vailable_disk\x18\x06 \x01(\x04\x12\x12\n\ntotal_disk\x18\x07 \x01(\x04\x1a\x35\n\rShardMetadata\x12\x0c\n\x04kbid\x18\x01 \x01(\t\x12\x16\n\nload_score\x18\x02 \x01(\x02\x42\x02\x18\x01\x1aX\n\x0bShardsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).noderesources.NodeMetadata.ShardMetadata:\x02\x38\x01\x42\x12\n\x10_primary_node_idP\x01\x62\x06proto3') _globals 
= globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'nucliadb_protos.noderesources_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None + _globals['_VECTORSETSENTENCES_SENTENCESENTRY']._options = None + _globals['_VECTORSETSENTENCES_SENTENCESENTRY']._serialized_options = b'8\001' _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._options = None _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._serialized_options = b'8\001' + _globals['_INDEXPARAGRAPH_VECTORSETSSENTENCESENTRY']._options = None + _globals['_INDEXPARAGRAPH_VECTORSETSSENTENCESENTRY']._serialized_options = b'8\001' _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._options = None _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._serialized_options = b'8\001' _globals['_RESOURCE_TEXTSENTRY']._options = None @@ -36,6 +40,10 @@ _globals['_RESOURCE_VECTORSENTRY']._serialized_options = b'8\001' _globals['_RESOURCE_VECTORSTODELETEENTRY']._options = None _globals['_RESOURCE_VECTORSTODELETEENTRY']._serialized_options = b'8\001' + _globals['_RESOURCE'].fields_by_name['vectors']._options = None + _globals['_RESOURCE'].fields_by_name['vectors']._serialized_options = b'\030\001' + _globals['_RESOURCE'].fields_by_name['vectors_to_delete']._options = None + _globals['_RESOURCE'].fields_by_name['vectors_to_delete']._serialized_options = b'\030\001' _globals['_NODEMETADATA_SHARDMETADATA'].fields_by_name['load_score']._options = None _globals['_NODEMETADATA_SHARDMETADATA'].fields_by_name['load_score']._serialized_options = b'\030\001' _globals['_NODEMETADATA_SHARDSENTRY']._options = None @@ -78,38 +86,44 @@ _globals['_SENTENCEMETADATA']._serialized_end=1476 _globals['_VECTORSENTENCE']._serialized_start=1478 _globals['_VECTORSENTENCE']._serialized_end=1561 - _globals['_PARAGRAPHMETADATA']._serialized_start=1564 - _globals['_PARAGRAPHMETADATA']._serialized_end=1707 - _globals['_INDEXPARAGRAPH']._serialized_start=1710 - _globals['_INDEXPARAGRAPH']._serialized_end=2040 - _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._serialized_start=1961 - _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._serialized_end=2040 - _globals['_VECTORSETID']._serialized_start=2042 - _globals['_VECTORSETID']._serialized_end=2113 - _globals['_VECTORSETLIST']._serialized_start=2115 - _globals['_VECTORSETLIST']._serialized_end=2188 - _globals['_INDEXPARAGRAPHS']._serialized_start=2191 - _globals['_INDEXPARAGRAPHS']._serialized_end=2358 - _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._serialized_start=2278 - _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._serialized_end=2358 - _globals['_RESOURCE']._serialized_start=2361 - _globals['_RESOURCE']._serialized_end=3357 - _globals['_RESOURCE_TEXTSENTRY']._serialized_start=2931 - _globals['_RESOURCE_TEXTSENTRY']._serialized_end=3007 - _globals['_RESOURCE_PARAGRAPHSENTRY']._serialized_start=3009 - _globals['_RESOURCE_PARAGRAPHSENTRY']._serialized_end=3090 - _globals['_RESOURCE_VECTORSENTRY']._serialized_start=3092 - _globals['_RESOURCE_VECTORSENTRY']._serialized_end=3158 - _globals['_RESOURCE_VECTORSTODELETEENTRY']._serialized_start=3160 - _globals['_RESOURCE_VECTORSTODELETEENTRY']._serialized_end=3238 - _globals['_RESOURCE_RESOURCESTATUS']._serialized_start=3240 - _globals['_RESOURCE_RESOURCESTATUS']._serialized_end=3344 - _globals['_SHARDMETADATA']._serialized_start=3359 - _globals['_SHARDMETADATA']._serialized_end=3436 - _globals['_NODEMETADATA']._serialized_start=3439 - _globals['_NODEMETADATA']._serialized_end=3815 - 
_globals['_NODEMETADATA_SHARDMETADATA']._serialized_start=3652 - _globals['_NODEMETADATA_SHARDMETADATA']._serialized_end=3705 - _globals['_NODEMETADATA_SHARDSENTRY']._serialized_start=3707 - _globals['_NODEMETADATA_SHARDSENTRY']._serialized_end=3795 + _globals['_VECTORSETSENTENCES']._serialized_start=1564 + _globals['_VECTORSETSENTENCES']._serialized_end=1734 + _globals['_VECTORSETSENTENCES_SENTENCESENTRY']._serialized_start=1655 + _globals['_VECTORSETSENTENCES_SENTENCESENTRY']._serialized_end=1734 + _globals['_PARAGRAPHMETADATA']._serialized_start=1737 + _globals['_PARAGRAPHMETADATA']._serialized_end=1880 + _globals['_INDEXPARAGRAPH']._serialized_start=1883 + _globals['_INDEXPARAGRAPH']._serialized_end=2394 + _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._serialized_start=1655 + _globals['_INDEXPARAGRAPH_SENTENCESENTRY']._serialized_end=1734 + _globals['_INDEXPARAGRAPH_VECTORSETSSENTENCESENTRY']._serialized_start=2301 + _globals['_INDEXPARAGRAPH_VECTORSETSSENTENCESENTRY']._serialized_end=2394 + _globals['_VECTORSETID']._serialized_start=2396 + _globals['_VECTORSETID']._serialized_end=2467 + _globals['_VECTORSETLIST']._serialized_start=2469 + _globals['_VECTORSETLIST']._serialized_end=2543 + _globals['_INDEXPARAGRAPHS']._serialized_start=2546 + _globals['_INDEXPARAGRAPHS']._serialized_end=2713 + _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._serialized_start=2633 + _globals['_INDEXPARAGRAPHS_PARAGRAPHSENTRY']._serialized_end=2713 + _globals['_RESOURCE']._serialized_start=2716 + _globals['_RESOURCE']._serialized_end=3720 + _globals['_RESOURCE_TEXTSENTRY']._serialized_start=3294 + _globals['_RESOURCE_TEXTSENTRY']._serialized_end=3370 + _globals['_RESOURCE_PARAGRAPHSENTRY']._serialized_start=3372 + _globals['_RESOURCE_PARAGRAPHSENTRY']._serialized_end=3453 + _globals['_RESOURCE_VECTORSENTRY']._serialized_start=3455 + _globals['_RESOURCE_VECTORSENTRY']._serialized_end=3521 + _globals['_RESOURCE_VECTORSTODELETEENTRY']._serialized_start=3523 + _globals['_RESOURCE_VECTORSTODELETEENTRY']._serialized_end=3601 + _globals['_RESOURCE_RESOURCESTATUS']._serialized_start=3603 + _globals['_RESOURCE_RESOURCESTATUS']._serialized_end=3707 + _globals['_SHARDMETADATA']._serialized_start=3722 + _globals['_SHARDMETADATA']._serialized_end=3799 + _globals['_NODEMETADATA']._serialized_start=3802 + _globals['_NODEMETADATA']._serialized_end=4178 + _globals['_NODEMETADATA_SHARDMETADATA']._serialized_start=4015 + _globals['_NODEMETADATA_SHARDMETADATA']._serialized_end=4068 + _globals['_NODEMETADATA_SHARDSENTRY']._serialized_start=4070 + _globals['_NODEMETADATA_SHARDSENTRY']._serialized_end=4158 # @@protoc_insertion_point(module_scope) diff --git a/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.pyi index e5e8f9ac80..4e26348217 100644 --- a/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/noderesources_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -41,7 +40,7 @@ from nucliadb_protos.utils_pb2 import ( DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class TextInformation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -56,11 +55,11 @@ class TextInformation(google.protobuf.message.Message): text: builtins.str = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["labels", b"labels", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labels", b"labels", "text", b"text"]) -> None: ... global___TextInformation = TextInformation -@typing.final +@typing_extensions.final class IndexMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -69,23 +68,21 @@ class IndexMetadata(google.protobuf.message.Message): @property def modified(self) -> google.protobuf.timestamp_pb2.Timestamp: """Tantivy doc & para""" - @property def created(self) -> google.protobuf.timestamp_pb2.Timestamp: """Tantivy doc & para""" - def __init__( self, *, modified: google.protobuf.timestamp_pb2.Timestamp | None = ..., created: google.protobuf.timestamp_pb2.Timestamp | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["created", b"created", "modified", b"modified"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["created", b"created", "modified", b"modified"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created", b"created", "modified", b"modified"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created", b"created", "modified", b"modified"]) -> None: ... global___IndexMetadata = IndexMetadata -@typing.final +@typing_extensions.final class ShardId(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -96,11 +93,11 @@ class ShardId(google.protobuf.message.Message): *, id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["id", b"id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id"]) -> None: ... global___ShardId = ShardId -@typing.final +@typing_extensions.final class ShardIds(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -112,11 +109,11 @@ class ShardIds(google.protobuf.message.Message): *, ids: collections.abc.Iterable[global___ShardId] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ids", b"ids"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ids", b"ids"]) -> None: ... 
global___ShardIds = ShardIds -@typing.final +@typing_extensions.final class ShardCreated(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -124,7 +121,7 @@ class ShardCreated(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _DocumentServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._DocumentService.ValueType], builtins.type): + class _DocumentServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._DocumentService.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DOCUMENT_V0: ShardCreated._DocumentService.ValueType # 0 DOCUMENT_V1: ShardCreated._DocumentService.ValueType # 1 @@ -139,7 +136,7 @@ class ShardCreated(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ParagraphServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._ParagraphService.ValueType], builtins.type): + class _ParagraphServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._ParagraphService.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PARAGRAPH_V0: ShardCreated._ParagraphService.ValueType # 0 PARAGRAPH_V1: ShardCreated._ParagraphService.ValueType # 1 @@ -156,7 +153,7 @@ class ShardCreated(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _VectorServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._VectorService.ValueType], builtins.type): + class _VectorServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._VectorService.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor VECTOR_V0: ShardCreated._VectorService.ValueType # 0 VECTOR_V1: ShardCreated._VectorService.ValueType # 1 @@ -169,7 +166,7 @@ class ShardCreated(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _RelationServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._RelationService.ValueType], builtins.type): + class _RelationServiceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ShardCreated._RelationService.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor RELATION_V0: ShardCreated._RelationService.ValueType # 0 RELATION_V1: ShardCreated._RelationService.ValueType # 1 @@ -199,11 +196,11 @@ class ShardCreated(google.protobuf.message.Message): vector_service: global___ShardCreated.VectorService.ValueType = ..., relation_service: global___ShardCreated.RelationService.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["document_service", b"document_service", "id", b"id", "paragraph_service", b"paragraph_service", "relation_service", b"relation_service", "vector_service", b"vector_service"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["document_service", b"document_service", "id", b"id", "paragraph_service", b"paragraph_service", "relation_service", b"relation_service", "vector_service", b"vector_service"]) -> None: ... 
global___ShardCreated = ShardCreated -@typing.final +@typing_extensions.final class ResourceID(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -217,11 +214,11 @@ class ResourceID(google.protobuf.message.Message): shard_id: builtins.str = ..., uuid: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["shard_id", b"shard_id", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["shard_id", b"shard_id", "uuid", b"uuid"]) -> None: ... global___ResourceID = ResourceID -@typing.final +@typing_extensions.final class Shard(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -230,12 +227,12 @@ class Shard(google.protobuf.message.Message): FIELDS_FIELD_NUMBER: builtins.int PARAGRAPHS_FIELD_NUMBER: builtins.int SENTENCES_FIELD_NUMBER: builtins.int + @property + def metadata(self) -> global___ShardMetadata: ... shard_id: builtins.str fields: builtins.int paragraphs: builtins.int sentences: builtins.int - @property - def metadata(self) -> global___ShardMetadata: ... def __init__( self, *, @@ -245,12 +242,12 @@ class Shard(google.protobuf.message.Message): paragraphs: builtins.int = ..., sentences: builtins.int = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["fields", b"fields", "metadata", b"metadata", "paragraphs", b"paragraphs", "sentences", b"sentences", "shard_id", b"shard_id"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "metadata", b"metadata", "paragraphs", b"paragraphs", "sentences", b"sentences", "shard_id", b"shard_id"]) -> None: ... global___Shard = Shard -@typing.final +@typing_extensions.final class EmptyResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -260,7 +257,7 @@ class EmptyResponse(google.protobuf.message.Message): global___EmptyResponse = EmptyResponse -@typing.final +@typing_extensions.final class EmptyQuery(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -270,7 +267,7 @@ class EmptyQuery(google.protobuf.message.Message): global___EmptyQuery = EmptyQuery -@typing.final +@typing_extensions.final class Position(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -290,7 +287,6 @@ class Position(google.protobuf.message.Message): @property def start_seconds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """For multimedia only""" - @property def end_seconds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... def __init__( @@ -304,11 +300,11 @@ class Position(google.protobuf.message.Message): start_seconds: collections.abc.Iterable[builtins.int] | None = ..., end_seconds: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "end_seconds", b"end_seconds", "in_page", b"in_page", "index", b"index", "page_number", b"page_number", "start", b"start", "start_seconds", b"start_seconds"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "end_seconds", b"end_seconds", "in_page", b"in_page", "index", b"index", "page_number", b"page_number", "start", b"start", "start_seconds", b"start_seconds"]) -> None: ... global___Position = Position -@typing.final +@typing_extensions.final class Representation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -322,20 +318,20 @@ class Representation(google.protobuf.message.Message): is_a_table: builtins.bool = ..., file: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["file", b"file", "is_a_table", b"is_a_table"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["file", b"file", "is_a_table", b"is_a_table"]) -> None: ... global___Representation = Representation -@typing.final +@typing_extensions.final class SentenceMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor POSITION_FIELD_NUMBER: builtins.int PAGE_WITH_VISUAL_FIELD_NUMBER: builtins.int REPRESENTATION_FIELD_NUMBER: builtins.int - page_with_visual: builtins.bool @property def position(self) -> global___Position: ... + page_with_visual: builtins.bool @property def representation(self) -> global___Representation: ... def __init__( @@ -345,12 +341,12 @@ class SentenceMetadata(google.protobuf.message.Message): page_with_visual: builtins.bool = ..., representation: global___Representation | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["position", b"position", "representation", b"representation"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["page_with_visual", b"page_with_visual", "position", b"position", "representation", b"representation"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["position", b"position", "representation", b"representation"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["page_with_visual", b"page_with_visual", "position", b"position", "representation", b"representation"]) -> None: ... global___SentenceMetadata = SentenceMetadata -@typing.final +@typing_extensions.final class VectorSentence(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -366,21 +362,56 @@ class VectorSentence(google.protobuf.message.Message): vector: collections.abc.Iterable[builtins.float] | None = ..., metadata: global___SentenceMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "vector", b"vector"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "vector", b"vector"]) -> None: ... global___VectorSentence = VectorSentence -@typing.final +@typing_extensions.final +class VectorsetSentences(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class SentencesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___VectorSentence: ... 
+ def __init__( + self, + *, + key: builtins.str = ..., + value: global___VectorSentence | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + SENTENCES_FIELD_NUMBER: builtins.int + @property + def sentences(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___VectorSentence]: + """key is full id for vectors""" + def __init__( + self, + *, + sentences: collections.abc.Mapping[builtins.str, global___VectorSentence] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["sentences", b"sentences"]) -> None: ... + +global___VectorsetSentences = VectorsetSentences + +@typing_extensions.final class ParagraphMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor POSITION_FIELD_NUMBER: builtins.int PAGE_WITH_VISUAL_FIELD_NUMBER: builtins.int REPRESENTATION_FIELD_NUMBER: builtins.int - page_with_visual: builtins.bool @property def position(self) -> global___Position: ... + page_with_visual: builtins.bool @property def representation(self) -> global___Representation: ... def __init__( @@ -390,16 +421,16 @@ class ParagraphMetadata(google.protobuf.message.Message): page_with_visual: builtins.bool = ..., representation: global___Representation | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["position", b"position", "representation", b"representation"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["page_with_visual", b"page_with_visual", "position", b"position", "representation", b"representation"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["position", b"position", "representation", b"representation"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["page_with_visual", b"page_with_visual", "position", b"position", "representation", b"representation"]) -> None: ... global___ParagraphMetadata = ParagraphMetadata -@typing.final +@typing_extensions.final class IndexParagraph(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SentencesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -414,13 +445,32 @@ class IndexParagraph(google.protobuf.message.Message): key: builtins.str = ..., value: global___VectorSentence | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing_extensions.final + class VectorsetsSentencesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___VectorsetSentences: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: global___VectorsetSentences | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... START_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int LABELS_FIELD_NUMBER: builtins.int SENTENCES_FIELD_NUMBER: builtins.int + VECTORSETS_SENTENCES_FIELD_NUMBER: builtins.int FIELD_FIELD_NUMBER: builtins.int SPLIT_FIELD_NUMBER: builtins.int INDEX_FIELD_NUMBER: builtins.int @@ -430,19 +480,20 @@ class IndexParagraph(google.protobuf.message.Message): """Start end position in field text""" end: builtins.int """Start end position in field text""" - field: builtins.str - split: builtins.str - """split were it belongs""" - index: builtins.int - repeated_in_field: builtins.bool @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Paragraph specific labels""" - @property def sentences(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___VectorSentence]: """key is full id for vectors""" - + @property + def vectorsets_sentences(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___VectorsetSentences]: + """key is vectorset id""" + field: builtins.str + split: builtins.str + """split were it belongs""" + index: builtins.int + repeated_in_field: builtins.bool @property def metadata(self) -> global___ParagraphMetadata: ... def __init__( @@ -452,63 +503,64 @@ class IndexParagraph(google.protobuf.message.Message): end: builtins.int = ..., labels: collections.abc.Iterable[builtins.str] | None = ..., sentences: collections.abc.Mapping[builtins.str, global___VectorSentence] | None = ..., + vectorsets_sentences: collections.abc.Mapping[builtins.str, global___VectorsetSentences] | None = ..., field: builtins.str = ..., split: builtins.str = ..., index: builtins.int = ..., repeated_in_field: builtins.bool = ..., metadata: global___ParagraphMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "field", b"field", "index", b"index", "labels", b"labels", "metadata", b"metadata", "repeated_in_field", b"repeated_in_field", "sentences", b"sentences", "split", b"split", "start", b"start"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "field", b"field", "index", b"index", "labels", b"labels", "metadata", b"metadata", "repeated_in_field", b"repeated_in_field", "sentences", b"sentences", "split", b"split", "start", b"start", "vectorsets_sentences", b"vectorsets_sentences"]) -> None: ... global___IndexParagraph = IndexParagraph -@typing.final +@typing_extensions.final class VectorSetID(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor SHARD_FIELD_NUMBER: builtins.int VECTORSET_FIELD_NUMBER: builtins.int - vectorset: builtins.str @property def shard(self) -> global___ShardId: ... + vectorset: builtins.str def __init__( self, *, shard: global___ShardId | None = ..., vectorset: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["shard", b"shard"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["shard", b"shard", "vectorset", b"vectorset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["shard", b"shard"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["shard", b"shard", "vectorset", b"vectorset"]) -> None: ... global___VectorSetID = VectorSetID -@typing.final +@typing_extensions.final class VectorSetList(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor SHARD_FIELD_NUMBER: builtins.int - VECTORSET_FIELD_NUMBER: builtins.int + VECTORSETS_FIELD_NUMBER: builtins.int @property def shard(self) -> global___ShardId: ... @property - def vectorset(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def vectorsets(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( self, *, shard: global___ShardId | None = ..., - vectorset: collections.abc.Iterable[builtins.str] | None = ..., + vectorsets: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["shard", b"shard"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["shard", b"shard", "vectorset", b"vectorset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["shard", b"shard"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["shard", b"shard", "vectorsets", b"vectorsets"]) -> None: ... global___VectorSetList = VectorSetList -@typing.final +@typing_extensions.final class IndexParagraphs(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ParagraphsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -523,24 +575,23 @@ class IndexParagraphs(google.protobuf.message.Message): key: builtins.str = ..., value: global___IndexParagraph | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PARAGRAPHS_FIELD_NUMBER: builtins.int @property def paragraphs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___IndexParagraph]: """id of the paragraph f"{self.rid}/{field_key}/{paragraph.start}-{paragraph.end}" """ - def __init__( self, *, paragraphs: collections.abc.Mapping[builtins.str, global___IndexParagraph] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["paragraphs", b"paragraphs"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["paragraphs", b"paragraphs"]) -> None: ... 
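
A minimal usage sketch of the regenerated messages, assuming the updated `nucliadb_protos.noderesources_pb2` module is importable; the shard id, vectorset name, and sentence key below are placeholder values, not taken from the patch:

    from nucliadb_protos import noderesources_pb2

    # VectorSetList now carries a repeated `vectorsets` field (renamed from `vectorset`).
    vectorset_list = noderesources_pb2.VectorSetList()
    vectorset_list.shard.id = "shard-uuid"            # placeholder shard id
    vectorset_list.vectorsets.append("my-vectorset")  # placeholder vectorset name

    # IndexParagraph gains `vectorsets_sentences`, a map keyed by vectorset id whose
    # values are VectorsetSentences (sentence id -> VectorSentence).
    paragraph = noderesources_pb2.IndexParagraph(start=0, end=42)
    sentences = paragraph.vectorsets_sentences["my-vectorset"].sentences
    sentences["rid/field/0-42"].vector.extend([0.1, 0.2, 0.3])  # placeholder sentence key

Message-valued map fields auto-create entries on access, so no explicit add/insert call is needed before filling in the per-vectorset sentence vectors.
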
global___IndexParagraphs = IndexParagraphs -@typing.final +@typing_extensions.final class Resource(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -548,7 +599,7 @@ class Resource(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ResourceStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Resource._ResourceStatus.ValueType], builtins.type): + class _ResourceStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Resource._ResourceStatus.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PROCESSED: Resource._ResourceStatus.ValueType # 0 EMPTY: Resource._ResourceStatus.ValueType # 1 @@ -567,7 +618,7 @@ class Resource(google.protobuf.message.Message): BLOCKED: Resource.ResourceStatus.ValueType # 5 EXPIRED: Resource.ResourceStatus.ValueType # 6 - @typing.final + @typing_extensions.final class TextsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -582,10 +633,10 @@ class Resource(google.protobuf.message.Message): key: builtins.str = ..., value: global___TextInformation | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class ParagraphsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -600,10 +651,10 @@ class Resource(google.protobuf.message.Message): key: builtins.str = ..., value: global___IndexParagraphs | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class VectorsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -618,10 +669,10 @@ class Resource(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.utils_pb2.UserVectors | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class VectorsToDeleteEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -636,8 +687,8 @@ class Resource(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.utils_pb2.UserVectorsList | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... RESOURCE_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int @@ -652,9 +703,6 @@ class Resource(google.protobuf.message.Message): VECTORS_FIELD_NUMBER: builtins.int VECTORS_TO_DELETE_FIELD_NUMBER: builtins.int SECURITY_FIELD_NUMBER: builtins.int - status: global___Resource.ResourceStatus.ValueType - """Tantivy doc""" - shard_id: builtins.str @property def resource(self) -> global___ResourceID: ... @property @@ -664,20 +712,19 @@ class Resource(google.protobuf.message.Message): """Doc index Tantivy doc filled by field allways full """ - @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """Key is RID/FIELDID Document labels always serialized full """ - + status: global___Resource.ResourceStatus.ValueType + """Tantivy doc""" @property def paragraphs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___IndexParagraphs]: """Paragraph Paragraphs by field """ - @property def paragraphs_to_delete(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property @@ -685,15 +732,13 @@ class Resource(google.protobuf.message.Message): @property def relations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.utils_pb2.Relation]: """Relations""" - + shard_id: builtins.str @property def vectors(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.utils_pb2.UserVectors]: """vectorset is the key""" - @property def vectors_to_delete(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.utils_pb2.UserVectorsList]: """Vectorset prefix vector id""" - @property def security(self) -> nucliadb_protos.utils_pb2.Security: ... def __init__( @@ -713,13 +758,13 @@ class Resource(google.protobuf.message.Message): vectors_to_delete: collections.abc.Mapping[builtins.str, nucliadb_protos.utils_pb2.UserVectorsList] | None = ..., security: nucliadb_protos.utils_pb2.Security | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_security", b"_security", "metadata", b"metadata", "resource", b"resource", "security", b"security"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_security", b"_security", "labels", b"labels", "metadata", b"metadata", "paragraphs", b"paragraphs", "paragraphs_to_delete", b"paragraphs_to_delete", "relations", b"relations", "resource", b"resource", "security", b"security", "sentences_to_delete", b"sentences_to_delete", "shard_id", b"shard_id", "status", b"status", "texts", b"texts", "vectors", b"vectors", "vectors_to_delete", b"vectors_to_delete"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_security", b"_security"]) -> typing.Literal["security"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_security", b"_security", "metadata", b"metadata", "resource", b"resource", "security", b"security"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["_security", b"_security", "labels", b"labels", "metadata", b"metadata", "paragraphs", b"paragraphs", "paragraphs_to_delete", b"paragraphs_to_delete", "relations", b"relations", "resource", b"resource", "security", b"security", "sentences_to_delete", b"sentences_to_delete", "shard_id", b"shard_id", "status", b"status", "texts", b"texts", "vectors", b"vectors", "vectors_to_delete", b"vectors_to_delete"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_security", b"_security"]) -> typing_extensions.Literal["security"] | None: ... global___Resource = Resource -@typing.final +@typing_extensions.final class ShardMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -733,15 +778,15 @@ class ShardMetadata(google.protobuf.message.Message): kbid: builtins.str = ..., release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "release_channel", b"release_channel"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "release_channel", b"release_channel"]) -> None: ... global___ShardMetadata = ShardMetadata -@typing.final +@typing_extensions.final class NodeMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ShardMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -755,9 +800,9 @@ class NodeMetadata(google.protobuf.message.Message): kbid: builtins.str = ..., load_score: builtins.float = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "load_score", b"load_score"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "load_score", b"load_score"]) -> None: ... - @typing.final + @typing_extensions.final class ShardsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -772,8 +817,8 @@ class NodeMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: global___NodeMetadata.ShardMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... LOAD_SCORE_FIELD_NUMBER: builtins.int SHARD_COUNT_FIELD_NUMBER: builtins.int @@ -784,12 +829,12 @@ class NodeMetadata(google.protobuf.message.Message): TOTAL_DISK_FIELD_NUMBER: builtins.int load_score: builtins.float shard_count: builtins.int + @property + def shards(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___NodeMetadata.ShardMetadata]: ... node_id: builtins.str primary_node_id: builtins.str available_disk: builtins.int total_disk: builtins.int - @property - def shards(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___NodeMetadata.ShardMetadata]: ... def __init__( self, *, @@ -801,8 +846,8 @@ class NodeMetadata(google.protobuf.message.Message): available_disk: builtins.int = ..., total_disk: builtins.int = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["_primary_node_id", b"_primary_node_id", "primary_node_id", b"primary_node_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_primary_node_id", b"_primary_node_id", "available_disk", b"available_disk", "load_score", b"load_score", "node_id", b"node_id", "primary_node_id", b"primary_node_id", "shard_count", b"shard_count", "shards", b"shards", "total_disk", b"total_disk"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_primary_node_id", b"_primary_node_id"]) -> typing.Literal["primary_node_id"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_primary_node_id", b"_primary_node_id", "primary_node_id", b"primary_node_id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_primary_node_id", b"_primary_node_id", "available_disk", b"available_disk", "load_score", b"load_score", "node_id", b"node_id", "primary_node_id", b"primary_node_id", "shard_count", b"shard_count", "shards", b"shards", "total_disk", b"total_disk"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_primary_node_id", b"_primary_node_id"]) -> typing_extensions.Literal["primary_node_id"] | None: ... global___NodeMetadata = NodeMetadata diff --git a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.py b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.py index 4593a74e50..278d977d2c 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/nodewriter.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool @@ -20,7 +20,7 @@ from nucliadb_protos.noderesources_pb2 import * -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n nucliadb_protos/nodewriter.proto\x12\nnodewriter\x1a#nucliadb_protos/noderesources.proto\"\xd9\x01\n\x08OpStatus\x12+\n\x06status\x18\x01 \x01(\x0e\x32\x1b.nodewriter.OpStatus.Status\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x17\n\x0b\x66ield_count\x18\x03 \x01(\x04\x42\x02\x18\x01\x12\x1b\n\x0fparagraph_count\x18\x05 \x01(\x04\x42\x02\x18\x01\x12\x1a\n\x0esentence_count\x18\x06 \x01(\x04\x42\x02\x18\x01\x12\x14\n\x08shard_id\x18\x04 \x01(\tB\x02\x18\x01\"(\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"\x86\x02\n\x0cIndexMessage\x12\x0c\n\x04node\x18\x01 \x01(\t\x12\r\n\x05shard\x18\x02 \x01(\t\x12\x0c\n\x04txid\x18\x03 \x01(\x04\x12\x10\n\x08resource\x18\x04 \x01(\t\x12,\n\x0btypemessage\x18\x05 \x01(\x0e\x32\x17.nodewriter.TypeMessage\x12\x12\n\nreindex_id\x18\x06 \x01(\t\x12\x16\n\tpartition\x18\x07 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x0bstorage_key\x18\x08 \x01(\t\x12\x0c\n\x04kbid\x18\t \x01(\t\x12.\n\x06source\x18\n \x01(\x0e\x32\x1e.nodewriter.IndexMessageSourceB\x0c\n\n_partition\"x\n\x18GarbageCollectorResponse\x12;\n\x06status\x18\x01 \x01(\x0e\x32+.nodewriter.GarbageCollectorResponse.Status\"\x1f\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\r\n\tTRY_LATER\x10\x01\"\x97\x01\n\x0fNewShardRequest\x12+\n\nsimilarity\x18\x01 \x01(\x0e\x32\x17.utils.VectorSimilarity\x12\x0c\n\x04kbid\x18\x02 \x01(\t\x12.\n\x0frelease_channel\x18\x03 
\x01(\x0e\x32\x15.utils.ReleaseChannel\x12\x19\n\x11normalize_vectors\x18\x04 \x01(\x08\"j\n\x13NewVectorSetRequest\x12&\n\x02id\x18\x01 \x01(\x0b\x32\x1a.noderesources.VectorSetID\x12+\n\nsimilarity\x18\x02 \x01(\x0e\x32\x17.utils.VectorSimilarity\"\x92\x01\n\rMergeResponse\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32%.nodewriter.MergeResponse.MergeStatus\x12\x17\n\x0fmerged_segments\x18\x02 \x01(\r\x12\x1a\n\x12remaining_segments\x18\x03 \x01(\r\"\x15\n\x0bMergeStatus\x12\x06\n\x02OK\x10\x00*)\n\x0bTypeMessage\x12\x0c\n\x08\x43REATION\x10\x00\x12\x0c\n\x08\x44\x45LETION\x10\x01*/\n\x12IndexMessageSource\x12\r\n\tPROCESSOR\x10\x00\x12\n\n\x06WRITER\x10\x01\x32\xd1\x06\n\nNodeWriter\x12\x46\n\x08NewShard\x12\x1b.nodewriter.NewShardRequest\x1a\x1b.noderesources.ShardCreated\"\x00\x12?\n\x0b\x44\x65leteShard\x12\x16.noderesources.ShardId\x1a\x16.noderesources.ShardId\"\x00\x12\x42\n\nListShards\x12\x19.noderesources.EmptyQuery\x1a\x17.noderesources.ShardIds\"\x00\x12\x44\n\x02GC\x12\x16.noderesources.ShardId\x1a$.nodewriter.GarbageCollectorResponse\"\x00\x12<\n\x05Merge\x12\x16.noderesources.ShardId\x1a\x19.nodewriter.MergeResponse\"\x00\x12>\n\x0bSetResource\x12\x17.noderesources.Resource\x1a\x14.nodewriter.OpStatus\"\x00\x12J\n\x16SetResourceFromStorage\x12\x18.nodewriter.IndexMessage\x1a\x14.nodewriter.OpStatus\"\x00\x12\x43\n\x0eRemoveResource\x12\x19.noderesources.ResourceID\x1a\x14.nodewriter.OpStatus\"\x00\x12G\n\x0c\x41\x64\x64VectorSet\x12\x1f.nodewriter.NewVectorSetRequest\x1a\x14.nodewriter.OpStatus\"\x00\x12\x45\n\x0fRemoveVectorSet\x12\x1a.noderesources.VectorSetID\x1a\x14.nodewriter.OpStatus\"\x00\x12H\n\x0eListVectorSets\x12\x16.noderesources.ShardId\x1a\x1c.noderesources.VectorSetList\"\x00\x12G\n\x0bGetMetadata\x12\x19.noderesources.EmptyQuery\x1a\x1b.noderesources.NodeMetadata\"\x00P\x00\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n nucliadb_protos/nodewriter.proto\x12\nnodewriter\x1a#nucliadb_protos/noderesources.proto\"\xd9\x01\n\x08OpStatus\x12+\n\x06status\x18\x01 \x01(\x0e\x32\x1b.nodewriter.OpStatus.Status\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x17\n\x0b\x66ield_count\x18\x03 \x01(\x04\x42\x02\x18\x01\x12\x1b\n\x0fparagraph_count\x18\x05 \x01(\x04\x42\x02\x18\x01\x12\x1a\n\x0esentence_count\x18\x06 \x01(\x04\x42\x02\x18\x01\x12\x14\n\x08shard_id\x18\x04 \x01(\tB\x02\x18\x01\"(\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"\x86\x02\n\x0cIndexMessage\x12\x0c\n\x04node\x18\x01 \x01(\t\x12\r\n\x05shard\x18\x02 \x01(\t\x12\x0c\n\x04txid\x18\x03 \x01(\x04\x12\x10\n\x08resource\x18\x04 \x01(\t\x12,\n\x0btypemessage\x18\x05 \x01(\x0e\x32\x17.nodewriter.TypeMessage\x12\x12\n\nreindex_id\x18\x06 \x01(\t\x12\x16\n\tpartition\x18\x07 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x0bstorage_key\x18\x08 \x01(\t\x12\x0c\n\x04kbid\x18\t \x01(\t\x12.\n\x06source\x18\n \x01(\x0e\x32\x1e.nodewriter.IndexMessageSourceB\x0c\n\n_partition\"x\n\x18GarbageCollectorResponse\x12;\n\x06status\x18\x01 \x01(\x0e\x32+.nodewriter.GarbageCollectorResponse.Status\"\x1f\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\r\n\tTRY_LATER\x10\x01\"\x97\x01\n\x0fNewShardRequest\x12+\n\nsimilarity\x18\x01 \x01(\x0e\x32\x17.utils.VectorSimilarity\x12\x0c\n\x04kbid\x18\x02 \x01(\t\x12.\n\x0frelease_channel\x18\x03 \x01(\x0e\x32\x15.utils.ReleaseChannel\x12\x19\n\x11normalize_vectors\x18\x04 \x01(\x08\"\x85\x01\n\x13NewVectorSetRequest\x12&\n\x02id\x18\x01 \x01(\x0b\x32\x1a.noderesources.VectorSetID\x12+\n\nsimilarity\x18\x02 
\x01(\x0e\x32\x17.utils.VectorSimilarity\x12\x19\n\x11normalize_vectors\x18\x03 \x01(\x08\"\x92\x01\n\rMergeResponse\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32%.nodewriter.MergeResponse.MergeStatus\x12\x17\n\x0fmerged_segments\x18\x02 \x01(\r\x12\x1a\n\x12remaining_segments\x18\x03 \x01(\r\"\x15\n\x0bMergeStatus\x12\x06\n\x02OK\x10\x00*)\n\x0bTypeMessage\x12\x0c\n\x08\x43REATION\x10\x00\x12\x0c\n\x08\x44\x45LETION\x10\x01*/\n\x12IndexMessageSource\x12\r\n\tPROCESSOR\x10\x00\x12\n\n\x06WRITER\x10\x01\x32\xd1\x06\n\nNodeWriter\x12\x46\n\x08NewShard\x12\x1b.nodewriter.NewShardRequest\x1a\x1b.noderesources.ShardCreated\"\x00\x12?\n\x0b\x44\x65leteShard\x12\x16.noderesources.ShardId\x1a\x16.noderesources.ShardId\"\x00\x12\x42\n\nListShards\x12\x19.noderesources.EmptyQuery\x1a\x17.noderesources.ShardIds\"\x00\x12\x44\n\x02GC\x12\x16.noderesources.ShardId\x1a$.nodewriter.GarbageCollectorResponse\"\x00\x12<\n\x05Merge\x12\x16.noderesources.ShardId\x1a\x19.nodewriter.MergeResponse\"\x00\x12>\n\x0bSetResource\x12\x17.noderesources.Resource\x1a\x14.nodewriter.OpStatus\"\x00\x12J\n\x16SetResourceFromStorage\x12\x18.nodewriter.IndexMessage\x1a\x14.nodewriter.OpStatus\"\x00\x12\x43\n\x0eRemoveResource\x12\x19.noderesources.ResourceID\x1a\x14.nodewriter.OpStatus\"\x00\x12G\n\x0c\x41\x64\x64VectorSet\x12\x1f.nodewriter.NewVectorSetRequest\x1a\x14.nodewriter.OpStatus\"\x00\x12\x45\n\x0fRemoveVectorSet\x12\x1a.noderesources.VectorSetID\x1a\x14.nodewriter.OpStatus\"\x00\x12H\n\x0eListVectorSets\x12\x16.noderesources.ShardId\x1a\x1c.noderesources.VectorSetList\"\x00\x12G\n\x0bGetMetadata\x12\x19.noderesources.EmptyQuery\x1a\x1b.noderesources.NodeMetadata\"\x00P\x00\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -35,10 +35,10 @@ _globals['_OPSTATUS'].fields_by_name['sentence_count']._serialized_options = b'\030\001' _globals['_OPSTATUS'].fields_by_name['shard_id']._options = None _globals['_OPSTATUS'].fields_by_name['shard_id']._serialized_options = b'\030\001' - _globals['_TYPEMESSAGE']._serialized_start=1103 - _globals['_TYPEMESSAGE']._serialized_end=1144 - _globals['_INDEXMESSAGESOURCE']._serialized_start=1146 - _globals['_INDEXMESSAGESOURCE']._serialized_end=1193 + _globals['_TYPEMESSAGE']._serialized_start=1131 + _globals['_TYPEMESSAGE']._serialized_end=1172 + _globals['_INDEXMESSAGESOURCE']._serialized_start=1174 + _globals['_INDEXMESSAGESOURCE']._serialized_end=1221 _globals['_OPSTATUS']._serialized_start=86 _globals['_OPSTATUS']._serialized_end=303 _globals['_OPSTATUS_STATUS']._serialized_start=263 @@ -51,12 +51,12 @@ _globals['_GARBAGECOLLECTORRESPONSE_STATUS']._serialized_end=690 _globals['_NEWSHARDREQUEST']._serialized_start=693 _globals['_NEWSHARDREQUEST']._serialized_end=844 - _globals['_NEWVECTORSETREQUEST']._serialized_start=846 - _globals['_NEWVECTORSETREQUEST']._serialized_end=952 - _globals['_MERGERESPONSE']._serialized_start=955 - _globals['_MERGERESPONSE']._serialized_end=1101 - _globals['_MERGERESPONSE_MERGESTATUS']._serialized_start=1080 - _globals['_MERGERESPONSE_MERGESTATUS']._serialized_end=1101 - _globals['_NODEWRITER']._serialized_start=1196 - _globals['_NODEWRITER']._serialized_end=2045 + _globals['_NEWVECTORSETREQUEST']._serialized_start=847 + _globals['_NEWVECTORSETREQUEST']._serialized_end=980 + _globals['_MERGERESPONSE']._serialized_start=983 + _globals['_MERGERESPONSE']._serialized_end=1129 + _globals['_MERGERESPONSE_MERGESTATUS']._serialized_start=1108 + _globals['_MERGERESPONSE_MERGESTATUS']._serialized_end=1129 + 
_globals['_NODEWRITER']._serialized_start=1224 + _globals['_NODEWRITER']._serialized_end=2073 # @@protoc_insertion_point(module_scope) diff --git a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.pyi index 1fd0a8212e..19c0c69d02 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import google.protobuf.descriptor import google.protobuf.internal.enum_type_wrapper @@ -38,6 +37,7 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) DESCRIPTOR: google.protobuf.descriptor.FileDescriptor @@ -73,7 +73,7 @@ PROCESSOR: IndexMessageSource.ValueType # 0 WRITER: IndexMessageSource.ValueType # 1 global___IndexMessageSource = IndexMessageSource -@typing.final +@typing_extensions.final class OpStatus(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -81,7 +81,7 @@ class OpStatus(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpStatus._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpStatus._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: OpStatus._Status.ValueType # 0 WARNING: OpStatus._Status.ValueType # 1 @@ -114,11 +114,11 @@ class OpStatus(google.protobuf.message.Message): sentence_count: builtins.int = ..., shard_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["detail", b"detail", "field_count", b"field_count", "paragraph_count", b"paragraph_count", "sentence_count", b"sentence_count", "shard_id", b"shard_id", "status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["detail", b"detail", "field_count", b"field_count", "paragraph_count", b"paragraph_count", "sentence_count", b"sentence_count", "shard_id", b"shard_id", "status", b"status"]) -> None: ... global___OpStatus = OpStatus -@typing.final +@typing_extensions.final class IndexMessage(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -157,13 +157,13 @@ class IndexMessage(google.protobuf.message.Message): kbid: builtins.str = ..., source: global___IndexMessageSource.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_partition", b"_partition", "partition", b"partition"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_partition", b"_partition", "kbid", b"kbid", "node", b"node", "partition", b"partition", "reindex_id", b"reindex_id", "resource", b"resource", "shard", b"shard", "source", b"source", "storage_key", b"storage_key", "txid", b"txid", "typemessage", b"typemessage"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_partition", b"_partition"]) -> typing.Literal["partition"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["_partition", b"_partition", "partition", b"partition"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["_partition", b"_partition", "kbid", b"kbid", "node", b"node", "partition", b"partition", "reindex_id", b"reindex_id", "resource", b"resource", "shard", b"shard", "source", b"source", "storage_key", b"storage_key", "txid", b"txid", "typemessage", b"typemessage"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_partition", b"_partition"]) -> typing_extensions.Literal["partition"] | None: ... global___IndexMessage = IndexMessage -@typing.final +@typing_extensions.final class GarbageCollectorResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -171,7 +171,7 @@ class GarbageCollectorResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GarbageCollectorResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GarbageCollectorResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GarbageCollectorResponse._Status.ValueType # 0 TRY_LATER: GarbageCollectorResponse._Status.ValueType # 1 @@ -187,11 +187,11 @@ class GarbageCollectorResponse(google.protobuf.message.Message): *, status: global___GarbageCollectorResponse.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___GarbageCollectorResponse = GarbageCollectorResponse -@typing.final +@typing_extensions.final class NewShardRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -212,31 +212,35 @@ class NewShardRequest(google.protobuf.message.Message): release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., normalize_vectors: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "normalize_vectors", b"normalize_vectors", "release_channel", b"release_channel", "similarity", b"similarity"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "normalize_vectors", b"normalize_vectors", "release_channel", b"release_channel", "similarity", b"similarity"]) -> None: ... global___NewShardRequest = NewShardRequest -@typing.final +@typing_extensions.final class NewVectorSetRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor ID_FIELD_NUMBER: builtins.int SIMILARITY_FIELD_NUMBER: builtins.int - similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType + NORMALIZE_VECTORS_FIELD_NUMBER: builtins.int @property def id(self) -> nucliadb_protos.noderesources_pb2.VectorSetID: ... + similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType + normalize_vectors: builtins.bool + """indicates whether the shard should normalize vectors on indexing or not""" def __init__( self, *, id: nucliadb_protos.noderesources_pb2.VectorSetID | None = ..., similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType = ..., + normalize_vectors: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["id", b"id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "similarity", b"similarity"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["id", b"id"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "normalize_vectors", b"normalize_vectors", "similarity", b"similarity"]) -> None: ... global___NewVectorSetRequest = NewVectorSetRequest -@typing.final +@typing_extensions.final class MergeResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -244,7 +248,7 @@ class MergeResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MergeStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MergeResponse._MergeStatus.ValueType], builtins.type): + class _MergeStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MergeResponse._MergeStatus.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: MergeResponse._MergeStatus.ValueType # 0 @@ -264,6 +268,6 @@ class MergeResponse(google.protobuf.message.Message): merged_segments: builtins.int = ..., remaining_segments: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["merged_segments", b"merged_segments", "remaining_segments", b"remaining_segments", "status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["merged_segments", b"merged_segments", "remaining_segments", b"remaining_segments", "status", b"status"]) -> None: ... global___MergeResponse = MergeResponse diff --git a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2_grpc.pyi index 8acb463a51..46ae9556ba 100644 --- a/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/nodewriter_pb2_grpc.pyi @@ -2,14 +2,10 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import abc -import collections.abc import grpc -import grpc.aio import nucliadb_protos.noderesources_pb2 import nucliadb_protos.nodewriter_pb2 -import typing from nucliadb_protos.noderesources_pb2 import ( EmptyQuery as EmptyQuery, EmptyResponse as EmptyResponse, @@ -32,221 +28,132 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... - class NodeWriterStub: - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... 
NewShard: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodewriter_pb2.NewShardRequest, nucliadb_protos.noderesources_pb2.ShardCreated, ] - DeleteShard: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.noderesources_pb2.ShardId, ] - ListShards: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.EmptyQuery, nucliadb_protos.noderesources_pb2.ShardIds, ] - GC: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodewriter_pb2.GarbageCollectorResponse, ] - Merge: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.nodewriter_pb2.MergeResponse, ] - SetResource: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.Resource, nucliadb_protos.nodewriter_pb2.OpStatus, ] - SetResourceFromStorage: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodewriter_pb2.IndexMessage, nucliadb_protos.nodewriter_pb2.OpStatus, ] - RemoveResource: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ResourceID, nucliadb_protos.nodewriter_pb2.OpStatus, ] - AddVectorSet: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.nodewriter_pb2.NewVectorSetRequest, nucliadb_protos.nodewriter_pb2.OpStatus, ] - RemoveVectorSet: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.VectorSetID, nucliadb_protos.nodewriter_pb2.OpStatus, ] - ListVectorSets: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.ShardId, nucliadb_protos.noderesources_pb2.VectorSetList, ] - GetMetadata: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.EmptyQuery, nucliadb_protos.noderesources_pb2.NodeMetadata, ] -class NodeWriterAsyncStub: - NewShard: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodewriter_pb2.NewShardRequest, - nucliadb_protos.noderesources_pb2.ShardCreated, - ] - - DeleteShard: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.noderesources_pb2.ShardId, - ] - - ListShards: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.EmptyQuery, - nucliadb_protos.noderesources_pb2.ShardIds, - ] - - GC: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodewriter_pb2.GarbageCollectorResponse, - ] - - Merge: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.nodewriter_pb2.MergeResponse, - ] - - SetResource: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.Resource, - nucliadb_protos.nodewriter_pb2.OpStatus, - ] - - SetResourceFromStorage: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodewriter_pb2.IndexMessage, - nucliadb_protos.nodewriter_pb2.OpStatus, - ] - - RemoveResource: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ResourceID, - nucliadb_protos.nodewriter_pb2.OpStatus, - ] - - AddVectorSet: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.nodewriter_pb2.NewVectorSetRequest, - nucliadb_protos.nodewriter_pb2.OpStatus, - ] - - RemoveVectorSet: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.VectorSetID, - nucliadb_protos.nodewriter_pb2.OpStatus, - ] - - ListVectorSets: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.ShardId, - nucliadb_protos.noderesources_pb2.VectorSetList, - ] - - GetMetadata: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.EmptyQuery, - nucliadb_protos.noderesources_pb2.NodeMetadata, - ] - class NodeWriterServicer(metaclass=abc.ABCMeta): @abc.abstractmethod def 
NewShard( self, request: nucliadb_protos.nodewriter_pb2.NewShardRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.ShardCreated, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.ShardCreated]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.ShardCreated: ... @abc.abstractmethod def DeleteShard( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.ShardId, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.ShardId]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.ShardId: ... @abc.abstractmethod def ListShards( self, request: nucliadb_protos.noderesources_pb2.EmptyQuery, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.ShardIds, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.ShardIds]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.ShardIds: ... @abc.abstractmethod def GC( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.GarbageCollectorResponse, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.GarbageCollectorResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.GarbageCollectorResponse: ... @abc.abstractmethod def Merge( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.MergeResponse, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.MergeResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.MergeResponse: ... @abc.abstractmethod def SetResource( self, request: nucliadb_protos.noderesources_pb2.Resource, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.OpStatus, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.OpStatus]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.OpStatus: ... @abc.abstractmethod def SetResourceFromStorage( self, request: nucliadb_protos.nodewriter_pb2.IndexMessage, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.OpStatus, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.OpStatus]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.OpStatus: ... @abc.abstractmethod def RemoveResource( self, request: nucliadb_protos.noderesources_pb2.ResourceID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.OpStatus, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.OpStatus]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.OpStatus: ... @abc.abstractmethod def AddVectorSet( self, request: nucliadb_protos.nodewriter_pb2.NewVectorSetRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.OpStatus, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.OpStatus]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.OpStatus: ... @abc.abstractmethod def RemoveVectorSet( self, request: nucliadb_protos.noderesources_pb2.VectorSetID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.nodewriter_pb2.OpStatus, collections.abc.Awaitable[nucliadb_protos.nodewriter_pb2.OpStatus]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.nodewriter_pb2.OpStatus: ... 
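Taken together, the regenerated messages and stubs above change how a Python client drives the vectorset RPCs: NewVectorSetRequest now carries normalize_vectors, and NodeWriterStub is typed against a plain synchronous grpc.Channel. A minimal client-side sketch follows; it assumes the noderesources_pb2 field names declared elsewhere in these protos (ShardId.id, VectorSetID.shard / VectorSetID.vectorset), and the address, shard id and vectorset name are illustrative only, not part of this patch.

import grpc

from nucliadb_protos import noderesources_pb2, nodewriter_pb2
from nucliadb_protos.nodewriter_pb2_grpc import NodeWriterStub


def create_and_list_vectorsets(address: str, shard_id: str, vectorset: str):
    # The regenerated stub type only accepts a synchronous grpc.Channel.
    with grpc.insecure_channel(address) as channel:
        stub = NodeWriterStub(channel)

        # NewVectorSetRequest now carries normalize_vectors (field 3);
        # similarity (a utils.VectorSimilarity value) may be set as well.
        request = nodewriter_pb2.NewVectorSetRequest(
            id=noderesources_pb2.VectorSetID(
                shard=noderesources_pb2.ShardId(id=shard_id),
                vectorset=vectorset,
            ),
            normalize_vectors=True,
        )
        status = stub.AddVectorSet(request)
        if status.status != nodewriter_pb2.OpStatus.OK:
            raise RuntimeError(f"AddVectorSet failed: {status.detail}")

        # ListVectorSets returns a VectorSetList with the repeated `vectorsets` field.
        listing = stub.ListVectorSets(noderesources_pb2.ShardId(id=shard_id))
        return list(listing.vectorsets)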
@abc.abstractmethod def ListVectorSets( self, request: nucliadb_protos.noderesources_pb2.ShardId, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.VectorSetList, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.VectorSetList]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.VectorSetList: ... @abc.abstractmethod def GetMetadata( self, request: nucliadb_protos.noderesources_pb2.EmptyQuery, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.NodeMetadata, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.NodeMetadata]]: ... + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.NodeMetadata: ... -def add_NodeWriterServicer_to_server(servicer: NodeWriterServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... +def add_NodeWriterServicer_to_server(servicer: NodeWriterServicer, server: grpc.Server) -> None: ... diff --git a/nucliadb_protos/python/nucliadb_protos/replication_pb2.py b/nucliadb_protos/python/nucliadb_protos/replication_pb2.py index cfd062fc3f..4fec4baaea 100644 --- a/nucliadb_protos/python/nucliadb_protos/replication_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/replication_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/replication.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/replication_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/replication_pb2.pyi index 63c49bb1a3..b7f1135554 100644 --- a/nucliadb_protos/python/nucliadb_protos/replication_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/replication_pb2.pyi @@ -2,14 +2,18 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.message import nucliadb_protos.utils_pb2 -import typing +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions from nucliadb_protos.noderesources_pb2 import ( EmptyQuery as EmptyQuery, EmptyResponse as EmptyResponse, @@ -32,11 +36,12 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class PrimaryShardReplicationState(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -65,11 +70,11 @@ class PrimaryShardReplicationState(google.protobuf.message.Message): normalize_vectors: builtins.bool = ..., release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["generation_id", b"generation_id", "kbid", b"kbid", "normalize_vectors", b"normalize_vectors", "release_channel", b"release_channel", "shard_id", b"shard_id", "similarity", b"similarity"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["generation_id", b"generation_id", "kbid", b"kbid", "normalize_vectors", b"normalize_vectors", "release_channel", b"release_channel", "shard_id", b"shard_id", "similarity", b"similarity"]) -> None: ... global___PrimaryShardReplicationState = PrimaryShardReplicationState -@typing.final +@typing_extensions.final class SecondaryShardReplicationState(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -86,11 +91,11 @@ class SecondaryShardReplicationState(google.protobuf.message.Message): shard_id: builtins.str = ..., generation_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["generation_id", b"generation_id", "shard_id", b"shard_id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["generation_id", b"generation_id", "shard_id", b"shard_id"]) -> None: ... global___SecondaryShardReplicationState = SecondaryShardReplicationState -@typing.final +@typing_extensions.final class SecondaryCheckReplicationStateRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -105,22 +110,22 @@ class SecondaryCheckReplicationStateRequest(google.protobuf.message.Message): secondary_id: builtins.str = ..., shard_states: collections.abc.Iterable[global___SecondaryShardReplicationState] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["secondary_id", b"secondary_id", "shard_states", b"shard_states"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["secondary_id", b"secondary_id", "shard_states", b"shard_states"]) -> None: ... global___SecondaryCheckReplicationStateRequest = SecondaryCheckReplicationStateRequest -@typing.final +@typing_extensions.final class PrimaryCheckReplicationStateResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor SHARD_STATES_FIELD_NUMBER: builtins.int SHARDS_TO_REMOVE_FIELD_NUMBER: builtins.int PRIMARY_ID_FIELD_NUMBER: builtins.int - primary_id: builtins.str @property def shard_states(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PrimaryShardReplicationState]: ... @property def shards_to_remove(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + primary_id: builtins.str def __init__( self, *, @@ -128,11 +133,11 @@ class PrimaryCheckReplicationStateResponse(google.protobuf.message.Message): shards_to_remove: collections.abc.Iterable[builtins.str] | None = ..., primary_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["primary_id", b"primary_id", "shard_states", b"shard_states", "shards_to_remove", b"shards_to_remove"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["primary_id", b"primary_id", "shard_states", b"shard_states", "shards_to_remove", b"shards_to_remove"]) -> None: ... global___PrimaryCheckReplicationStateResponse = PrimaryCheckReplicationStateResponse -@typing.final +@typing_extensions.final class SegmentIds(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -144,15 +149,15 @@ class SegmentIds(google.protobuf.message.Message): *, items: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["items", b"items"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["items", b"items"]) -> None: ... 
global___SegmentIds = SegmentIds -@typing.final +@typing_extensions.final class ReplicateShardRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ExistingSegmentIdsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -167,18 +172,17 @@ class ReplicateShardRequest(google.protobuf.message.Message): key: builtins.str = ..., value: global___SegmentIds | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... SHARD_ID_FIELD_NUMBER: builtins.int EXISTING_SEGMENT_IDS_FIELD_NUMBER: builtins.int CHUNK_SIZE_FIELD_NUMBER: builtins.int shard_id: builtins.str - chunk_size: builtins.int @property def existing_segment_ids(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___SegmentIds]: """list of existing segment ids so we replicate same shards again""" - + chunk_size: builtins.int def __init__( self, *, @@ -186,11 +190,11 @@ class ReplicateShardRequest(google.protobuf.message.Message): existing_segment_ids: collections.abc.Mapping[builtins.str, global___SegmentIds] | None = ..., chunk_size: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["chunk_size", b"chunk_size", "existing_segment_ids", b"existing_segment_ids", "shard_id", b"shard_id"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["chunk_size", b"chunk_size", "existing_segment_ids", b"existing_segment_ids", "shard_id", b"shard_id"]) -> None: ... global___ReplicateShardRequest = ReplicateShardRequest -@typing.final +@typing_extensions.final class ReplicateShardResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -216,6 +220,6 @@ class ReplicateShardResponse(google.protobuf.message.Message): read_position: builtins.int = ..., total_size: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["chunk", b"chunk", "data", b"data", "filepath", b"filepath", "generation_id", b"generation_id", "read_position", b"read_position", "total_size", b"total_size"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["chunk", b"chunk", "data", b"data", "filepath", b"filepath", "generation_id", b"generation_id", "read_position", b"read_position", "total_size", b"total_size"]) -> None: ... global___ReplicateShardResponse = ReplicateShardResponse diff --git a/nucliadb_protos/python/nucliadb_protos/replication_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/replication_pb2_grpc.pyi index 739a270b73..e38d463a8c 100644 --- a/nucliadb_protos/python/nucliadb_protos/replication_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/replication_pb2_grpc.pyi @@ -2,14 +2,11 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import abc import collections.abc import grpc -import grpc.aio import nucliadb_protos.noderesources_pb2 import nucliadb_protos.replication_pb2 -import typing from nucliadb_protos.noderesources_pb2 import ( EmptyQuery as EmptyQuery, EmptyResponse as EmptyResponse, @@ -32,71 +29,44 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... - class ReplicationServiceStub: - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... CheckReplicationState: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.replication_pb2.SecondaryCheckReplicationStateRequest, nucliadb_protos.replication_pb2.PrimaryCheckReplicationStateResponse, ] """Shard replication RPCs""" - ReplicateShard: grpc.UnaryStreamMultiCallable[ nucliadb_protos.replication_pb2.ReplicateShardRequest, nucliadb_protos.replication_pb2.ReplicateShardResponse, ] - GetMetadata: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.noderesources_pb2.EmptyQuery, nucliadb_protos.noderesources_pb2.NodeMetadata, ] -class ReplicationServiceAsyncStub: - CheckReplicationState: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.replication_pb2.SecondaryCheckReplicationStateRequest, - nucliadb_protos.replication_pb2.PrimaryCheckReplicationStateResponse, - ] - """Shard replication RPCs""" - - ReplicateShard: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.replication_pb2.ReplicateShardRequest, - nucliadb_protos.replication_pb2.ReplicateShardResponse, - ] - - GetMetadata: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.noderesources_pb2.EmptyQuery, - nucliadb_protos.noderesources_pb2.NodeMetadata, - ] - class ReplicationServiceServicer(metaclass=abc.ABCMeta): @abc.abstractmethod def CheckReplicationState( self, request: nucliadb_protos.replication_pb2.SecondaryCheckReplicationStateRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.replication_pb2.PrimaryCheckReplicationStateResponse, collections.abc.Awaitable[nucliadb_protos.replication_pb2.PrimaryCheckReplicationStateResponse]]: + context: grpc.ServicerContext, + ) -> nucliadb_protos.replication_pb2.PrimaryCheckReplicationStateResponse: """Shard replication RPCs""" - @abc.abstractmethod def ReplicateShard( self, request: nucliadb_protos.replication_pb2.ReplicateShardRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.replication_pb2.ReplicateShardResponse], collections.abc.AsyncIterator[nucliadb_protos.replication_pb2.ReplicateShardResponse]]: ... - + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.replication_pb2.ReplicateShardResponse]: ... @abc.abstractmethod def GetMetadata( self, request: nucliadb_protos.noderesources_pb2.EmptyQuery, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.noderesources_pb2.NodeMetadata, collections.abc.Awaitable[nucliadb_protos.noderesources_pb2.NodeMetadata]]: ... + context: grpc.ServicerContext, + ) -> nucliadb_protos.noderesources_pb2.NodeMetadata: ... 
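The replication stubs follow the same pattern: servicer methods are typed to return their response messages directly (ReplicateShard as a plain iterator of responses), and the registration helper that follows is typed against a synchronous grpc.Server. A minimal sketch of a servicer matching those signatures; the port, node id and empty chunk are illustrative values only and not part of this patch.

from concurrent import futures
from typing import Iterator

import grpc

from nucliadb_protos import noderesources_pb2, replication_pb2, replication_pb2_grpc


class DummyReplicationService(replication_pb2_grpc.ReplicationServiceServicer):
    def CheckReplicationState(self, request, context) -> replication_pb2.PrimaryCheckReplicationStateResponse:
        # Pretend there is nothing to replicate or remove.
        return replication_pb2.PrimaryCheckReplicationStateResponse(primary_id="primary-0")

    def ReplicateShard(self, request, context) -> Iterator[replication_pb2.ReplicateShardResponse]:
        # Stream back a single empty chunk for the requested shard.
        yield replication_pb2.ReplicateShardResponse(data=b"", read_position=0, total_size=0)

    def GetMetadata(self, request, context) -> noderesources_pb2.NodeMetadata:
        return noderesources_pb2.NodeMetadata(node_id="dummy-node")


def serve(port: int = 50051) -> grpc.Server:
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    # The regenerated helper is typed against a plain grpc.Server (no grpc.aio).
    replication_pb2_grpc.add_ReplicationServiceServicer_to_server(DummyReplicationService(), server)
    server.add_insecure_port(f"[::]:{port}")
    server.start()
    return server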
-def add_ReplicationServiceServicer_to_server(servicer: ReplicationServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... +def add_ReplicationServiceServicer_to_server(servicer: ReplicationServiceServicer, server: grpc.Server) -> None: ... diff --git a/nucliadb_protos/python/nucliadb_protos/resources_pb2.py b/nucliadb_protos/python/nucliadb_protos/resources_pb2.py index b5c1f62011..2d28a98116 100644 --- a/nucliadb_protos/python/nucliadb_protos/resources_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/resources_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/resources.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/resources_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/resources_pb2.pyi index d841afe53e..12a40f5795 100644 --- a/nucliadb_protos/python/nucliadb_protos/resources_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/resources_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -71,7 +70,7 @@ GENERIC: FieldType.ValueType # 6 CONVERSATION: FieldType.ValueType # 7 global___FieldType = FieldType -@typing.final +@typing_extensions.final class CloudFile(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -79,7 +78,7 @@ class CloudFile(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CloudFile._Source.ValueType], builtins.type): + class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CloudFile._Source.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor FLAPS: CloudFile._Source.ValueType # 0 GCS: CloudFile._Source.ValueType # 1 @@ -123,11 +122,11 @@ class CloudFile(google.protobuf.message.Message): """Temporal upload information""" offset: builtins.int upload_uri: builtins.str + @property + def parts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... old_uri: builtins.str old_bucket: builtins.str md5: builtins.str - @property - def parts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... def __init__( self, *, @@ -145,11 +144,11 @@ class CloudFile(google.protobuf.message.Message): old_bucket: builtins.str = ..., md5: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bucket_name", b"bucket_name", "content_type", b"content_type", "filename", b"filename", "md5", b"md5", "offset", b"offset", "old_bucket", b"old_bucket", "old_uri", b"old_uri", "parts", b"parts", "resumable_uri", b"resumable_uri", "size", b"size", "source", b"source", "upload_uri", b"upload_uri", "uri", b"uri"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bucket_name", b"bucket_name", "content_type", b"content_type", "filename", b"filename", "md5", b"md5", "offset", b"offset", "old_bucket", b"old_bucket", "old_uri", b"old_uri", "parts", b"parts", "resumable_uri", b"resumable_uri", "size", b"size", "source", b"source", "upload_uri", b"upload_uri", "uri", b"uri"]) -> None: ... global___CloudFile = CloudFile -@typing.final +@typing_extensions.final class Basic(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -157,7 +156,7 @@ class Basic(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _QueueTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Basic._QueueType.ValueType], builtins.type): + class _QueueTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Basic._QueueType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PRIVATE: Basic._QueueType.ValueType # 0 SHARED: Basic._QueueType.ValueType # 1 @@ -190,13 +189,6 @@ class Basic(google.protobuf.message.Message): thumbnail: builtins.str """reference to inner thumbnail""" layout: builtins.str - uuid: builtins.str - """Only for read operations""" - last_seqid: builtins.int - """last processing seqid of the resource""" - last_account_seq: builtins.int - """last processing sequid (non nats) of this resource in the account queue""" - queue: global___Basic.QueueType.ValueType @property def created(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property @@ -206,13 +198,19 @@ class Basic(google.protobuf.message.Message): @property def usermetadata(self) -> global___UserMetadata: """Not Basic""" - @property def fieldmetadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UserFieldMetadata]: ... @property def computedmetadata(self) -> global___ComputedMetadata: ... + uuid: builtins.str + """Only for read operations""" @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + last_seqid: builtins.int + """last processing seqid of the resource""" + last_account_seq: builtins.int + """last processing sequid (non nats) of this resource in the account queue""" + queue: global___Basic.QueueType.ValueType def __init__( self, *, @@ -234,12 +232,12 @@ class Basic(google.protobuf.message.Message): last_account_seq: builtins.int = ..., queue: global___Basic.QueueType.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["computedmetadata", b"computedmetadata", "created", b"created", "metadata", b"metadata", "modified", b"modified", "usermetadata", b"usermetadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["computedmetadata", b"computedmetadata", "created", b"created", "fieldmetadata", b"fieldmetadata", "icon", b"icon", "labels", b"labels", "last_account_seq", b"last_account_seq", "last_seqid", b"last_seqid", "layout", b"layout", "metadata", b"metadata", "modified", b"modified", "queue", b"queue", "slug", b"slug", "summary", b"summary", "thumbnail", b"thumbnail", "title", b"title", "usermetadata", b"usermetadata", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["computedmetadata", b"computedmetadata", "created", b"created", "metadata", b"metadata", "modified", b"modified", "usermetadata", b"usermetadata"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["computedmetadata", b"computedmetadata", "created", b"created", "fieldmetadata", b"fieldmetadata", "icon", b"icon", "labels", b"labels", "last_account_seq", b"last_account_seq", "last_seqid", b"last_seqid", "layout", b"layout", "metadata", b"metadata", "modified", b"modified", "queue", b"queue", "slug", b"slug", "summary", b"summary", "thumbnail", b"thumbnail", "title", b"title", "usermetadata", b"usermetadata", "uuid", b"uuid"]) -> None: ... global___Basic = Basic -@typing.final +@typing_extensions.final class Origin(google.protobuf.message.Message): """Block behaviors""" @@ -249,7 +247,7 @@ class Origin(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Origin._Source.ValueType], builtins.type): + class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Origin._Source.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor WEB: Origin._Source.ValueType # 0 DESKTOP: Origin._Source.ValueType # 1 @@ -260,7 +258,7 @@ class Origin(google.protobuf.message.Message): DESKTOP: Origin.Source.ValueType # 1 API: Origin.Source.ValueType # 2 - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -274,7 +272,7 @@ class Origin(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... SOURCE_FIELD_NUMBER: builtins.int SOURCE_ID_FIELD_NUMBER: builtins.int @@ -290,8 +288,6 @@ class Origin(google.protobuf.message.Message): source: global___Origin.Source.ValueType source_id: builtins.str url: builtins.str - filename: builtins.str - path: builtins.str @property def created(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property @@ -302,8 +298,10 @@ class Origin(google.protobuf.message.Message): def tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def colaborators(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + filename: builtins.str @property def related(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + path: builtins.str def __init__( self, *, @@ -319,12 +317,12 @@ class Origin(google.protobuf.message.Message): related: collections.abc.Iterable[builtins.str] | None = ..., path: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["created", b"created", "modified", b"modified"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["colaborators", b"colaborators", "created", b"created", "filename", b"filename", "metadata", b"metadata", "modified", b"modified", "path", b"path", "related", b"related", "source", b"source", "source_id", b"source_id", "tags", b"tags", "url", b"url"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created", b"created", "modified", b"modified"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["colaborators", b"colaborators", "created", b"created", "filename", b"filename", "metadata", b"metadata", "modified", b"modified", "path", b"path", "related", b"related", "source", b"source", "source_id", b"source_id", "tags", b"tags", "url", b"url"]) -> None: ... global___Origin = Origin -@typing.final +@typing_extensions.final class Extra(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -336,12 +334,12 @@ class Extra(google.protobuf.message.Message): *, metadata: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> None: ... global___Extra = Extra -@typing.final +@typing_extensions.final class Relations(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -353,11 +351,11 @@ class Relations(google.protobuf.message.Message): *, relations: collections.abc.Iterable[nucliadb_protos.utils_pb2.Relation] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["relations", b"relations"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["relations", b"relations"]) -> None: ... global___Relations = Relations -@typing.final +@typing_extensions.final class MessageContent(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -365,7 +363,7 @@ class MessageContent(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MessageContent._Format.ValueType], builtins.type): + class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MessageContent._Format.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PLAIN: MessageContent._Format.ValueType # 0 HTML: MessageContent._Format.ValueType # 1 @@ -394,11 +392,11 @@ class MessageContent(google.protobuf.message.Message): format: global___MessageContent.Format.ValueType = ..., attachments: collections.abc.Iterable[global___CloudFile] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["attachments", b"attachments", "format", b"format", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["attachments", b"attachments", "format", b"format", "text", b"text"]) -> None: ... 
global___MessageContent = MessageContent -@typing.final +@typing_extensions.final class Message(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -406,7 +404,7 @@ class Message(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MessageTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Message._MessageType.ValueType], builtins.type): + class _MessageTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Message._MessageType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSET: Message._MessageType.ValueType # 0 QUESTION: Message._MessageType.ValueType # 1 @@ -423,15 +421,15 @@ class Message(google.protobuf.message.Message): CONTENT_FIELD_NUMBER: builtins.int IDENT_FIELD_NUMBER: builtins.int TYPE_FIELD_NUMBER: builtins.int - who: builtins.str - ident: builtins.str - type: global___Message.MessageType.ValueType @property def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + who: builtins.str @property def to(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property def content(self) -> global___MessageContent: ... + ident: builtins.str + type: global___Message.MessageType.ValueType def __init__( self, *, @@ -442,12 +440,12 @@ class Message(google.protobuf.message.Message): ident: builtins.str = ..., type: global___Message.MessageType.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["content", b"content", "timestamp", b"timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["content", b"content", "ident", b"ident", "timestamp", b"timestamp", "to", b"to", "type", b"type", "who", b"who"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["content", b"content", "timestamp", b"timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content", b"content", "ident", b"ident", "timestamp", b"timestamp", "to", b"to", "type", b"type", "who", b"who"]) -> None: ... global___Message = Message -@typing.final +@typing_extensions.final class Conversation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -459,11 +457,11 @@ class Conversation(google.protobuf.message.Message): *, messages: collections.abc.Iterable[global___Message] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["messages", b"messages"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["messages", b"messages"]) -> None: ... global___Conversation = Conversation -@typing.final +@typing_extensions.final class FieldConversation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -483,11 +481,11 @@ class FieldConversation(google.protobuf.message.Message): size: builtins.int = ..., total: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["pages", b"pages", "size", b"size", "total", b"total"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["pages", b"pages", "size", b"size", "total", b"total"]) -> None: ... 
global___FieldConversation = FieldConversation -@typing.final +@typing_extensions.final class NestedPosition(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -504,11 +502,11 @@ class NestedPosition(google.protobuf.message.Message): end: builtins.int = ..., page: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "page", b"page", "start", b"start"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "page", b"page", "start", b"start"]) -> None: ... global___NestedPosition = NestedPosition -@typing.final +@typing_extensions.final class NestedListPosition(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -520,15 +518,15 @@ class NestedListPosition(google.protobuf.message.Message): *, positions: collections.abc.Iterable[global___NestedPosition] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["positions", b"positions"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["positions", b"positions"]) -> None: ... global___NestedListPosition = NestedListPosition -@typing.final +@typing_extensions.final class FileExtractedData(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -542,9 +540,9 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class NestedEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -558,9 +556,9 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class FileGeneratedEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -575,10 +573,10 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: global___CloudFile | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class FileRowsPreviewsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -593,10 +591,10 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: global___RowsPreview | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class NestedPositionEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -611,10 +609,10 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: global___NestedPosition | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class NestedListPositionEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -629,8 +627,8 @@ class FileExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: global___NestedListPosition | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... LANGUAGE_FIELD_NUMBER: builtins.int MD5_FIELD_NUMBER: builtins.int @@ -647,8 +645,6 @@ class FileExtractedData(google.protobuf.message.Message): NESTED_LIST_POSITION_FIELD_NUMBER: builtins.int language: builtins.str md5: builtins.str - field: builtins.str - icon: builtins.str @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property @@ -663,6 +659,8 @@ class FileExtractedData(google.protobuf.message.Message): def file_pages_previews(self) -> global___FilePages: ... @property def file_thumbnail(self) -> global___CloudFile: ... + field: builtins.str + icon: builtins.str @property def nested_position(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___NestedPosition]: ... @property @@ -684,16 +682,16 @@ class FileExtractedData(google.protobuf.message.Message): nested_position: collections.abc.Mapping[builtins.str, global___NestedPosition] | None = ..., nested_list_position: collections.abc.Mapping[builtins.str, global___NestedListPosition] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["file_pages_previews", b"file_pages_previews", "file_preview", b"file_preview", "file_thumbnail", b"file_thumbnail"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "file_generated", b"file_generated", "file_pages_previews", b"file_pages_previews", "file_preview", b"file_preview", "file_rows_previews", b"file_rows_previews", "file_thumbnail", b"file_thumbnail", "icon", b"icon", "language", b"language", "md5", b"md5", "metadata", b"metadata", "nested", b"nested", "nested_list_position", b"nested_list_position", "nested_position", b"nested_position"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["file_pages_previews", b"file_pages_previews", "file_preview", b"file_preview", "file_thumbnail", b"file_thumbnail"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "file_generated", b"file_generated", "file_pages_previews", b"file_pages_previews", "file_preview", b"file_preview", "file_rows_previews", b"file_rows_previews", "file_thumbnail", b"file_thumbnail", "icon", b"icon", "language", b"language", "md5", b"md5", "metadata", b"metadata", "nested", b"nested", "nested_list_position", b"nested_list_position", "nested_position", b"nested_position"]) -> None: ... global___FileExtractedData = FileExtractedData -@typing.final +@typing_extensions.final class LinkExtractedData(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -707,9 +705,9 @@ class LinkExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class FileGeneratedEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -724,8 +722,8 @@ class LinkExtractedData(google.protobuf.message.Message): key: builtins.str = ..., value: global___CloudFile | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... DATE_FIELD_NUMBER: builtins.int LANGUAGE_FIELD_NUMBER: builtins.int @@ -740,28 +738,27 @@ class LinkExtractedData(google.protobuf.message.Message): EMBED_FIELD_NUMBER: builtins.int PDF_STRUCTURE_FIELD_NUMBER: builtins.int FILE_GENERATED_FIELD_NUMBER: builtins.int - language: builtins.str - title: builtins.str - field: builtins.str - description: builtins.str - type: builtins.str - embed: builtins.str @property def date(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + language: builtins.str + title: builtins.str @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property def link_thumbnail(self) -> global___CloudFile: ... @property def link_preview(self) -> global___CloudFile: ... + field: builtins.str @property def link_image(self) -> global___CloudFile: ... + description: builtins.str + type: builtins.str + embed: builtins.str @property def pdf_structure(self) -> global___PageStructure: ... @property def file_generated(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___CloudFile]: """The key is the file ID""" - def __init__( self, *, @@ -779,12 +776,12 @@ class LinkExtractedData(google.protobuf.message.Message): pdf_structure: global___PageStructure | None = ..., file_generated: collections.abc.Mapping[builtins.str, global___CloudFile] | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["date", b"date", "link_image", b"link_image", "link_preview", b"link_preview", "link_thumbnail", b"link_thumbnail", "pdf_structure", b"pdf_structure"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["date", b"date", "description", b"description", "embed", b"embed", "field", b"field", "file_generated", b"file_generated", "language", b"language", "link_image", b"link_image", "link_preview", b"link_preview", "link_thumbnail", b"link_thumbnail", "metadata", b"metadata", "pdf_structure", b"pdf_structure", "title", b"title", "type", b"type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["date", b"date", "link_image", b"link_image", "link_preview", b"link_preview", "link_thumbnail", b"link_thumbnail", "pdf_structure", b"pdf_structure"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["date", b"date", "description", b"description", "embed", b"embed", "field", b"field", "file_generated", b"file_generated", "language", b"language", "link_image", b"link_image", "link_preview", b"link_preview", "link_thumbnail", b"link_thumbnail", "metadata", b"metadata", "pdf_structure", b"pdf_structure", "title", b"title", "type", b"type"]) -> None: ... global___LinkExtractedData = LinkExtractedData -@typing.final +@typing_extensions.final class ExtractedTextWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -804,13 +801,13 @@ class ExtractedTextWrapper(google.protobuf.message.Message): file: global___CloudFile | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["body", b"body", "field", b"field", "file", b"file", "file_or_data", b"file_or_data"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["body", b"body", "field", b"field", "file", b"file", "file_or_data", b"file_or_data"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["file_or_data", b"file_or_data"]) -> typing.Literal["body", "file"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["body", b"body", "field", b"field", "file", b"file", "file_or_data", b"file_or_data"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "field", b"field", "file", b"file", "file_or_data", b"file_or_data"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["file_or_data", b"file_or_data"]) -> typing_extensions.Literal["body", "file"] | None: ... global___ExtractedTextWrapper = ExtractedTextWrapper -@typing.final +@typing_extensions.final class ExtractedVectorsWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -830,17 +827,17 @@ class ExtractedVectorsWrapper(google.protobuf.message.Message): file: global___CloudFile | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "vectors", b"vectors"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "vectors", b"vectors"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["file_or_data", b"file_or_data"]) -> typing.Literal["vectors", "file"] | None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "vectors", b"vectors"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "vectors", b"vectors"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["file_or_data", b"file_or_data"]) -> typing_extensions.Literal["vectors", "file"] | None: ... global___ExtractedVectorsWrapper = ExtractedVectorsWrapper -@typing.final +@typing_extensions.final class UserVectorsWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class VectorsToDeleteEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -855,8 +852,8 @@ class UserVectorsWrapper(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.utils_pb2.UserVectorsList | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... VECTORS_FIELD_NUMBER: builtins.int VECTORS_TO_DELETE_FIELD_NUMBER: builtins.int @@ -866,7 +863,6 @@ class UserVectorsWrapper(google.protobuf.message.Message): @property def vectors_to_delete(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.utils_pb2.UserVectorsList]: """Vectorset prefix vector id""" - @property def field(self) -> global___FieldID: ... def __init__( @@ -876,12 +872,12 @@ class UserVectorsWrapper(google.protobuf.message.Message): vectors_to_delete: collections.abc.Mapping[builtins.str, nucliadb_protos.utils_pb2.UserVectorsList] | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "vectors", b"vectors"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "vectors", b"vectors", "vectors_to_delete", b"vectors_to_delete"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "vectors", b"vectors"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "vectors", b"vectors", "vectors_to_delete", b"vectors_to_delete"]) -> None: ... global___UserVectorsWrapper = UserVectorsWrapper -@typing.final +@typing_extensions.final class Sentence(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -898,11 +894,11 @@ class Sentence(google.protobuf.message.Message): end: builtins.int = ..., key: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "key", b"key", "start", b"start"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "key", b"key", "start", b"start"]) -> None: ... global___Sentence = Sentence -@typing.final +@typing_extensions.final class PageInformation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -916,11 +912,11 @@ class PageInformation(google.protobuf.message.Message): page: builtins.int = ..., page_with_visual: builtins.bool = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["page", b"page", "page_with_visual", b"page_with_visual"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["page", b"page", "page_with_visual", b"page_with_visual"]) -> None: ... global___PageInformation = PageInformation -@typing.final +@typing_extensions.final class Representation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -934,11 +930,11 @@ class Representation(google.protobuf.message.Message): is_a_table: builtins.bool = ..., reference_file: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["is_a_table", b"is_a_table", "reference_file", b"reference_file"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["is_a_table", b"is_a_table", "reference_file", b"reference_file"]) -> None: ... global___Representation = Representation -@typing.final +@typing_extensions.final class ParagraphRelations(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -958,11 +954,11 @@ class ParagraphRelations(google.protobuf.message.Message): siblings: collections.abc.Iterable[builtins.str] | None = ..., replacements: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["parents", b"parents", "replacements", b"replacements", "siblings", b"siblings"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["parents", b"parents", "replacements", b"replacements", "siblings", b"siblings"]) -> None: ... global___ParagraphRelations = ParagraphRelations -@typing.final +@typing_extensions.final class Paragraph(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -970,7 +966,7 @@ class Paragraph(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeParagraphEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Paragraph._TypeParagraph.ValueType], builtins.type): + class _TypeParagraphEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Paragraph._TypeParagraph.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TEXT: Paragraph._TypeParagraph.ValueType # 0 OCR: Paragraph._TypeParagraph.ValueType # 1 @@ -1003,18 +999,18 @@ class Paragraph(google.protobuf.message.Message): RELATIONS_FIELD_NUMBER: builtins.int start: builtins.int end: builtins.int - kind: global___Paragraph.TypeParagraph.ValueType - key: builtins.str - text: builtins.str - """Optional, as a computed value""" @property def start_seconds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... @property def end_seconds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + kind: global___Paragraph.TypeParagraph.ValueType @property def classifications(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Classification]: ... @property def sentences(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Sentence]: ... + key: builtins.str + text: builtins.str + """Optional, as a computed value""" @property def page(self) -> global___PageInformation: ... 
@property @@ -1037,12 +1033,12 @@ class Paragraph(google.protobuf.message.Message): representation: global___Representation | None = ..., relations: global___ParagraphRelations | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["page", b"page", "relations", b"relations", "representation", b"representation"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["classifications", b"classifications", "end", b"end", "end_seconds", b"end_seconds", "key", b"key", "kind", b"kind", "page", b"page", "relations", b"relations", "representation", b"representation", "sentences", b"sentences", "start", b"start", "start_seconds", b"start_seconds", "text", b"text"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["page", b"page", "relations", b"relations", "representation", b"representation"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["classifications", b"classifications", "end", b"end", "end_seconds", b"end_seconds", "key", b"key", "kind", b"kind", "page", b"page", "relations", b"relations", "representation", b"representation", "sentences", b"sentences", "start", b"start", "start_seconds", b"start_seconds", "text", b"text"]) -> None: ... global___Paragraph = Paragraph -@typing.final +@typing_extensions.final class Position(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1056,34 +1052,34 @@ class Position(google.protobuf.message.Message): start: builtins.int = ..., end: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> None: ... global___Position = Position -@typing.final +@typing_extensions.final class Positions(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor POSITION_FIELD_NUMBER: builtins.int ENTITY_FIELD_NUMBER: builtins.int - entity: builtins.str @property def position(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Position]: ... + entity: builtins.str def __init__( self, *, position: collections.abc.Iterable[global___Position] | None = ..., entity: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entity", b"entity", "position", b"position"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity", b"entity", "position", b"position"]) -> None: ... global___Positions = Positions -@typing.final +@typing_extensions.final class FieldMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class NerEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1097,9 +1093,9 @@ class FieldMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class PositionsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1114,8 +1110,8 @@ class FieldMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: global___Positions | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... LINKS_FIELD_NUMBER: builtins.int PARAGRAPHS_FIELD_NUMBER: builtins.int @@ -1130,8 +1126,6 @@ class FieldMetadata(google.protobuf.message.Message): SUMMARY_FIELD_NUMBER: builtins.int POSITIONS_FIELD_NUMBER: builtins.int RELATIONS_FIELD_NUMBER: builtins.int - language: builtins.str - summary: builtins.str @property def links(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... @property @@ -1139,7 +1133,6 @@ class FieldMetadata(google.protobuf.message.Message): @property def ner(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """Document""" - @property def classifications(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Classification]: ... @property @@ -1152,10 +1145,11 @@ class FieldMetadata(google.protobuf.message.Message): def last_summary(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property def thumbnail(self) -> global___CloudFile: ... + language: builtins.str + summary: builtins.str @property def positions(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Positions]: """Document""" - @property def relations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Relations]: ... def __init__( @@ -1175,12 +1169,12 @@ class FieldMetadata(google.protobuf.message.Message): positions: collections.abc.Mapping[builtins.str, global___Positions] | None = ..., relations: collections.abc.Iterable[global___Relations] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["last_extract", b"last_extract", "last_index", b"last_index", "last_summary", b"last_summary", "last_understanding", b"last_understanding", "thumbnail", b"thumbnail"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["classifications", b"classifications", "language", b"language", "last_extract", b"last_extract", "last_index", b"last_index", "last_summary", b"last_summary", "last_understanding", b"last_understanding", "links", b"links", "ner", b"ner", "paragraphs", b"paragraphs", "positions", b"positions", "relations", b"relations", "summary", b"summary", "thumbnail", b"thumbnail"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["last_extract", b"last_extract", "last_index", b"last_index", "last_summary", b"last_summary", "last_understanding", b"last_understanding", "thumbnail", b"thumbnail"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["classifications", b"classifications", "language", b"language", "last_extract", b"last_extract", "last_index", b"last_index", "last_summary", b"last_summary", "last_understanding", b"last_understanding", "links", b"links", "ner", b"ner", "paragraphs", b"paragraphs", "positions", b"positions", "relations", b"relations", "summary", b"summary", "thumbnail", b"thumbnail"]) -> None: ... 
global___FieldMetadata = FieldMetadata -@typing.final +@typing_extensions.final class Question(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1198,11 +1192,11 @@ class Question(google.protobuf.message.Message): language: builtins.str = ..., ids_paragraphs: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ids_paragraphs", b"ids_paragraphs", "language", b"language", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ids_paragraphs", b"ids_paragraphs", "language", b"language", "text", b"text"]) -> None: ... global___Question = Question -@typing.final +@typing_extensions.final class Answers(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1220,11 +1214,11 @@ class Answers(google.protobuf.message.Message): language: builtins.str = ..., ids_paragraphs: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ids_paragraphs", b"ids_paragraphs", "language", b"language", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ids_paragraphs", b"ids_paragraphs", "language", b"language", "text", b"text"]) -> None: ... global___Answers = Answers -@typing.final +@typing_extensions.final class QuestionAnswer(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1240,12 +1234,12 @@ class QuestionAnswer(google.protobuf.message.Message): question: global___Question | None = ..., answers: collections.abc.Iterable[global___Answers] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["question", b"question"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["answers", b"answers", "question", b"question"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["question", b"question"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["answers", b"answers", "question", b"question"]) -> None: ... global___QuestionAnswer = QuestionAnswer -@typing.final +@typing_extensions.final class QuestionAnswers(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1257,11 +1251,11 @@ class QuestionAnswers(google.protobuf.message.Message): *, question_answer: collections.abc.Iterable[global___QuestionAnswer] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["question_answer", b"question_answer"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["question_answer", b"question_answer"]) -> None: ... global___QuestionAnswers = QuestionAnswers -@typing.final +@typing_extensions.final class FieldQuestionAnswerWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1281,17 +1275,17 @@ class FieldQuestionAnswerWrapper(google.protobuf.message.Message): file: global___CloudFile | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "question_answers", b"question_answers"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "question_answers", b"question_answers"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing.Literal["file_or_data", b"file_or_data"]) -> typing.Literal["question_answers", "file"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "question_answers", b"question_answers"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "question_answers", b"question_answers"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["file_or_data", b"file_or_data"]) -> typing_extensions.Literal["question_answers", "file"] | None: ... global___FieldQuestionAnswerWrapper = FieldQuestionAnswerWrapper -@typing.final +@typing_extensions.final class FieldComputedMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SplitMetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1306,8 +1300,8 @@ class FieldComputedMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: global___FieldMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... METADATA_FIELD_NUMBER: builtins.int SPLIT_METADATA_FIELD_NUMBER: builtins.int @@ -1325,12 +1319,12 @@ class FieldComputedMetadata(google.protobuf.message.Message): split_metadata: collections.abc.Mapping[builtins.str, global___FieldMetadata] | None = ..., deleted_splits: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deleted_splits", b"deleted_splits", "metadata", b"metadata", "split_metadata", b"split_metadata"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deleted_splits", b"deleted_splits", "metadata", b"metadata", "split_metadata", b"split_metadata"]) -> None: ... global___FieldComputedMetadata = FieldComputedMetadata -@typing.final +@typing_extensions.final class FieldComputedMetadataWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1346,12 +1340,12 @@ class FieldComputedMetadataWrapper(google.protobuf.message.Message): metadata: global___FieldComputedMetadata | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata"]) -> None: ... 
global___FieldComputedMetadataWrapper = FieldComputedMetadataWrapper -@typing.final +@typing_extensions.final class Metadata(google.protobuf.message.Message): """Mutable behaviors""" @@ -1361,7 +1355,7 @@ class Metadata(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Metadata._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Metadata._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PENDING: Metadata._Status.ValueType # 0 PROCESSED: Metadata._Status.ValueType # 1 @@ -1376,7 +1370,7 @@ class Metadata(google.protobuf.message.Message): BLOCKED: Metadata.Status.ValueType # 3 EXPIRED: Metadata.Status.ValueType # 4 - @typing.final + @typing_extensions.final class MetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1390,20 +1384,20 @@ class Metadata(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... METADATA_FIELD_NUMBER: builtins.int LANGUAGE_FIELD_NUMBER: builtins.int LANGUAGES_FIELD_NUMBER: builtins.int USEFUL_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - language: builtins.str - useful: builtins.bool - status: global___Metadata.Status.ValueType @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + language: builtins.str @property def languages(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + useful: builtins.bool + status: global___Metadata.Status.ValueType def __init__( self, *, @@ -1413,11 +1407,11 @@ class Metadata(google.protobuf.message.Message): useful: builtins.bool = ..., status: global___Metadata.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["language", b"language", "languages", b"languages", "metadata", b"metadata", "status", b"status", "useful", b"useful"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["language", b"language", "languages", b"languages", "metadata", b"metadata", "status", b"status", "useful", b"useful"]) -> None: ... global___Metadata = Metadata -@typing.final +@typing_extensions.final class FieldText(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1425,7 +1419,7 @@ class FieldText(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldText._Format.ValueType], builtins.type): + class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldText._Format.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PLAIN: FieldText._Format.ValueType # 0 HTML: FieldText._Format.ValueType # 1 @@ -1457,11 +1451,11 @@ class FieldText(google.protobuf.message.Message): format: global___FieldText.Format.ValueType = ..., md5: builtins.str = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["body", b"body", "format", b"format", "md5", b"md5"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "format", b"format", "md5", b"md5"]) -> None: ... global___FieldText = FieldText -@typing.final +@typing_extensions.final class Block(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1469,7 +1463,7 @@ class Block(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeBlockEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Block._TypeBlock.ValueType], builtins.type): + class _TypeBlockEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Block._TypeBlock.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor TITLE: Block._TypeBlock.ValueType # 0 DESCRIPTION: Block._TypeBlock.ValueType # 1 @@ -1517,16 +1511,16 @@ class Block(google.protobuf.message.Message): payload: builtins.str = ..., file: global___CloudFile | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["file", b"file"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cols", b"cols", "file", b"file", "ident", b"ident", "payload", b"payload", "rows", b"rows", "type", b"type", "x", b"x", "y", b"y"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["file", b"file"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cols", b"cols", "file", b"file", "ident", b"ident", "payload", b"payload", "rows", b"rows", "type", b"type", "x", b"x", "y", b"y"]) -> None: ... global___Block = Block -@typing.final +@typing_extensions.final class LayoutContent(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class BlocksEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1541,8 +1535,8 @@ class LayoutContent(google.protobuf.message.Message): key: builtins.str = ..., value: global___Block | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... BLOCKS_FIELD_NUMBER: builtins.int DELETED_BLOCKS_FIELD_NUMBER: builtins.int @@ -1556,11 +1550,11 @@ class LayoutContent(google.protobuf.message.Message): blocks: collections.abc.Mapping[builtins.str, global___Block] | None = ..., deleted_blocks: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["blocks", b"blocks", "deleted_blocks", b"deleted_blocks"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["blocks", b"blocks", "deleted_blocks", b"deleted_blocks"]) -> None: ... 
global___LayoutContent = LayoutContent -@typing.final +@typing_extensions.final class FieldLayout(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1568,7 +1562,7 @@ class FieldLayout(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldLayout._Format.ValueType], builtins.type): + class _FormatEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldLayout._Format.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NUCLIAv1: FieldLayout._Format.ValueType # 0 @@ -1577,21 +1571,21 @@ class FieldLayout(google.protobuf.message.Message): BODY_FIELD_NUMBER: builtins.int FORMAT_FIELD_NUMBER: builtins.int - format: global___FieldLayout.Format.ValueType @property def body(self) -> global___LayoutContent: ... + format: global___FieldLayout.Format.ValueType def __init__( self, *, body: global___LayoutContent | None = ..., format: global___FieldLayout.Format.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["body", b"body"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["body", b"body", "format", b"format"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["body", b"body"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["body", b"body", "format", b"format"]) -> None: ... global___FieldLayout = FieldLayout -@typing.final +@typing_extensions.final class Classification(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1612,11 +1606,11 @@ class Classification(google.protobuf.message.Message): cancelled_by_user: builtins.bool = ..., split: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cancelled_by_user", b"cancelled_by_user", "label", b"label", "labelset", b"labelset", "split", b"split"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["cancelled_by_user", b"cancelled_by_user", "label", b"label", "labelset", b"labelset", "split", b"split"]) -> None: ... global___Classification = Classification -@typing.final +@typing_extensions.final class UserMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1632,11 +1626,11 @@ class UserMetadata(google.protobuf.message.Message): classifications: collections.abc.Iterable[global___Classification] | None = ..., relations: collections.abc.Iterable[nucliadb_protos.utils_pb2.Relation] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["classifications", b"classifications", "relations", b"relations"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["classifications", b"classifications", "relations", b"relations"]) -> None: ... global___UserMetadata = UserMetadata -@typing.final +@typing_extensions.final class FieldClassifications(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1652,12 +1646,12 @@ class FieldClassifications(google.protobuf.message.Message): field: global___FieldID | None = ..., classifications: collections.abc.Iterable[global___Classification] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["classifications", b"classifications", "field", b"field"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["classifications", b"classifications", "field", b"field"]) -> None: ... global___FieldClassifications = FieldClassifications -@typing.final +@typing_extensions.final class ComputedMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1669,11 +1663,11 @@ class ComputedMetadata(google.protobuf.message.Message): *, field_classifications: collections.abc.Iterable[global___FieldClassifications] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field_classifications", b"field_classifications"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field_classifications", b"field_classifications"]) -> None: ... global___ComputedMetadata = ComputedMetadata -@typing.final +@typing_extensions.final class TokenSplit(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1699,11 +1693,11 @@ class TokenSplit(google.protobuf.message.Message): cancelled_by_user: builtins.bool = ..., split: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cancelled_by_user", b"cancelled_by_user", "end", b"end", "klass", b"klass", "split", b"split", "start", b"start", "token", b"token"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["cancelled_by_user", b"cancelled_by_user", "end", b"end", "klass", b"klass", "split", b"split", "start", b"start", "token", b"token"]) -> None: ... global___TokenSplit = TokenSplit -@typing.final +@typing_extensions.final class ParagraphAnnotation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1718,31 +1712,31 @@ class ParagraphAnnotation(google.protobuf.message.Message): key: builtins.str = ..., classifications: collections.abc.Iterable[global___Classification] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["classifications", b"classifications", "key", b"key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["classifications", b"classifications", "key", b"key"]) -> None: ... global___ParagraphAnnotation = ParagraphAnnotation -@typing.final +@typing_extensions.final class QuestionAnswerAnnotation(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor QUESTION_ANSWER_FIELD_NUMBER: builtins.int CANCELLED_BY_USER_FIELD_NUMBER: builtins.int - cancelled_by_user: builtins.bool @property def question_answer(self) -> global___QuestionAnswer: ... + cancelled_by_user: builtins.bool def __init__( self, *, question_answer: global___QuestionAnswer | None = ..., cancelled_by_user: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["question_answer", b"question_answer"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["cancelled_by_user", b"cancelled_by_user", "question_answer", b"question_answer"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["question_answer", b"question_answer"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cancelled_by_user", b"cancelled_by_user", "question_answer", b"question_answer"]) -> None: ... 
global___QuestionAnswerAnnotation = QuestionAnswerAnnotation -@typing.final +@typing_extensions.final class VisualSelection(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1760,7 +1754,6 @@ class VisualSelection(google.protobuf.message.Message): @property def token_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: """Token IDs are the indexes in PageStructure""" - def __init__( self, *, @@ -1771,11 +1764,11 @@ class VisualSelection(google.protobuf.message.Message): bottom: builtins.float = ..., token_ids: collections.abc.Iterable[builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bottom", b"bottom", "label", b"label", "left", b"left", "right", b"right", "token_ids", b"token_ids", "top", b"top"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bottom", b"bottom", "label", b"label", "left", b"left", "right", b"right", "token_ids", b"token_ids", "top", b"top"]) -> None: ... global___VisualSelection = VisualSelection -@typing.final +@typing_extensions.final class PageSelections(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1790,11 +1783,11 @@ class PageSelections(google.protobuf.message.Message): page: builtins.int = ..., visual: collections.abc.Iterable[global___VisualSelection] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["page", b"page", "visual", b"visual"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["page", b"page", "visual", b"visual"]) -> None: ... global___PageSelections = PageSelections -@typing.final +@typing_extensions.final class UserFieldMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1822,16 +1815,16 @@ class UserFieldMetadata(google.protobuf.message.Message): question_answers: collections.abc.Iterable[global___QuestionAnswerAnnotation] | None = ..., field: global___FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "page_selections", b"page_selections", "paragraphs", b"paragraphs", "question_answers", b"question_answers", "token", b"token"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "page_selections", b"page_selections", "paragraphs", b"paragraphs", "question_answers", b"question_answers", "token", b"token"]) -> None: ... global___UserFieldMetadata = UserFieldMetadata -@typing.final +@typing_extensions.final class FieldLink(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class HeadersEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1845,9 +1838,9 @@ class FieldLink(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
- @typing.final + @typing_extensions.final class CookiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1861,9 +1854,9 @@ class FieldLink(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class LocalstorageEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1877,7 +1870,7 @@ class FieldLink(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ADDED_FIELD_NUMBER: builtins.int HEADERS_FIELD_NUMBER: builtins.int @@ -1887,18 +1880,18 @@ class FieldLink(google.protobuf.message.Message): LOCALSTORAGE_FIELD_NUMBER: builtins.int CSS_SELECTOR_FIELD_NUMBER: builtins.int XPATH_FIELD_NUMBER: builtins.int - uri: builtins.str - language: builtins.str - css_selector: builtins.str - xpath: builtins.str @property def added(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property def headers(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property def cookies(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + uri: builtins.str + language: builtins.str @property def localstorage(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + css_selector: builtins.str + xpath: builtins.str def __init__( self, *, @@ -1911,12 +1904,12 @@ class FieldLink(google.protobuf.message.Message): css_selector: builtins.str = ..., xpath: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["added", b"added"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["added", b"added", "cookies", b"cookies", "css_selector", b"css_selector", "headers", b"headers", "language", b"language", "localstorage", b"localstorage", "uri", b"uri", "xpath", b"xpath"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["added", b"added"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["added", b"added", "cookies", b"cookies", "css_selector", b"css_selector", "headers", b"headers", "language", b"language", "localstorage", b"localstorage", "uri", b"uri", "xpath", b"xpath"]) -> None: ... global___FieldLink = FieldLink -@typing.final +@typing_extensions.final class Keyword(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1927,11 +1920,11 @@ class Keyword(google.protobuf.message.Message): *, value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... global___Keyword = Keyword -@typing.final +@typing_extensions.final class FieldKeywordset(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1943,11 +1936,11 @@ class FieldKeywordset(google.protobuf.message.Message): *, keywords: collections.abc.Iterable[global___Keyword] | None = ..., ) -> None: ... 
- def ClearField(self, field_name: typing.Literal["keywords", b"keywords"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keywords", b"keywords"]) -> None: ... global___FieldKeywordset = FieldKeywordset -@typing.final +@typing_extensions.final class FieldDatetime(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1959,16 +1952,16 @@ class FieldDatetime(google.protobuf.message.Message): *, value: google.protobuf.timestamp_pb2.Timestamp | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... global___FieldDatetime = FieldDatetime -@typing.final +@typing_extensions.final class FieldFile(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class HeadersEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1982,7 +1975,7 @@ class FieldFile(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ADDED_FIELD_NUMBER: builtins.int FILE_FIELD_NUMBER: builtins.int @@ -1990,13 +1983,13 @@ class FieldFile(google.protobuf.message.Message): PASSWORD_FIELD_NUMBER: builtins.int URL_FIELD_NUMBER: builtins.int HEADERS_FIELD_NUMBER: builtins.int - language: builtins.str - password: builtins.str - url: builtins.str @property def added(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property def file(self) -> global___CloudFile: ... + language: builtins.str + password: builtins.str + url: builtins.str @property def headers(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... def __init__( @@ -2009,12 +2002,12 @@ class FieldFile(google.protobuf.message.Message): url: builtins.str = ..., headers: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["added", b"added", "file", b"file"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["added", b"added", "file", b"file", "headers", b"headers", "language", b"language", "password", b"password", "url", b"url"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["added", b"added", "file", b"file"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["added", b"added", "file", b"file", "headers", b"headers", "language", b"language", "password", b"password", "url", b"url"]) -> None: ... global___FieldFile = FieldFile -@typing.final +@typing_extensions.final class Entity(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2031,15 +2024,15 @@ class Entity(google.protobuf.message.Message): root: builtins.str = ..., type: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["root", b"root", "token", b"token", "type", b"type"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["root", b"root", "token", b"token", "type", b"type"]) -> None: ... global___Entity = Entity -@typing.final +@typing_extensions.final class FieldLargeMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class TokensEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2053,7 +2046,7 @@ class FieldLargeMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... ENTITIES_FIELD_NUMBER: builtins.int TOKENS_FIELD_NUMBER: builtins.int @@ -2067,15 +2060,15 @@ class FieldLargeMetadata(google.protobuf.message.Message): entities: collections.abc.Iterable[global___Entity] | None = ..., tokens: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "tokens", b"tokens"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "tokens", b"tokens"]) -> None: ... global___FieldLargeMetadata = FieldLargeMetadata -@typing.final +@typing_extensions.final class LargeComputedMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SplitMetadataEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2090,8 +2083,8 @@ class LargeComputedMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: global___FieldLargeMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... METADATA_FIELD_NUMBER: builtins.int SPLIT_METADATA_FIELD_NUMBER: builtins.int @@ -2109,12 +2102,12 @@ class LargeComputedMetadata(google.protobuf.message.Message): split_metadata: collections.abc.Mapping[builtins.str, global___FieldLargeMetadata] | None = ..., deleted_splits: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deleted_splits", b"deleted_splits", "metadata", b"metadata", "split_metadata", b"split_metadata"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deleted_splits", b"deleted_splits", "metadata", b"metadata", "split_metadata", b"split_metadata"]) -> None: ... global___LargeComputedMetadata = LargeComputedMetadata -@typing.final +@typing_extensions.final class LargeComputedMetadataWrapper(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2134,13 +2127,13 @@ class LargeComputedMetadataWrapper(google.protobuf.message.Message): file: global___CloudFile | None = ..., field: global___FieldID | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "real", b"real"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "real", b"real"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["file_or_data", b"file_or_data"]) -> typing.Literal["real", "file"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "real", b"real"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "file", b"file", "file_or_data", b"file_or_data", "real", b"real"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["file_or_data", b"file_or_data"]) -> typing_extensions.Literal["real", "file"] | None: ... global___LargeComputedMetadataWrapper = LargeComputedMetadataWrapper -@typing.final +@typing_extensions.final class PagePositions(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2154,11 +2147,11 @@ class PagePositions(google.protobuf.message.Message): start: builtins.int = ..., end: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> None: ... global___PagePositions = PagePositions -@typing.final +@typing_extensions.final class PageStructurePage(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2172,11 +2165,11 @@ class PageStructurePage(google.protobuf.message.Message): width: builtins.int = ..., height: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["height", b"height", "width", b"width"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["height", b"height", "width", b"width"]) -> None: ... global___PageStructurePage = PageStructurePage -@typing.final +@typing_extensions.final class PageStructureToken(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2202,11 +2195,11 @@ class PageStructureToken(google.protobuf.message.Message): text: builtins.str = ..., line: builtins.float = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["height", b"height", "line", b"line", "text", b"text", "width", b"width", "x", b"x", "y", b"y"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["height", b"height", "line", b"line", "text", b"text", "width", b"width", "x", b"x", "y", b"y"]) -> None: ... global___PageStructureToken = PageStructureToken -@typing.final +@typing_extensions.final class PageStructure(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2222,12 +2215,12 @@ class PageStructure(google.protobuf.message.Message): page: global___PageStructurePage | None = ..., tokens: collections.abc.Iterable[global___PageStructureToken] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["page", b"page"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["page", b"page", "tokens", b"tokens"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["page", b"page"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["page", b"page", "tokens", b"tokens"]) -> None: ... 
global___PageStructure = PageStructure -@typing.final +@typing_extensions.final class FilePages(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2247,19 +2240,19 @@ class FilePages(google.protobuf.message.Message): positions: collections.abc.Iterable[global___PagePositions] | None = ..., structures: collections.abc.Iterable[global___PageStructure] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["pages", b"pages", "positions", b"positions", "structures", b"structures"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["pages", b"pages", "positions", b"positions", "structures", b"structures"]) -> None: ... global___FilePages = FilePages -@typing.final +@typing_extensions.final class RowsPreview(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class Sheet(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class Row(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2271,7 +2264,7 @@ class RowsPreview(google.protobuf.message.Message): *, cell: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["cell", b"cell"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["cell", b"cell"]) -> None: ... ROWS_FIELD_NUMBER: builtins.int @property @@ -2281,9 +2274,9 @@ class RowsPreview(google.protobuf.message.Message): *, rows: collections.abc.Iterable[global___RowsPreview.Sheet.Row] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["rows", b"rows"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["rows", b"rows"]) -> None: ... - @typing.final + @typing_extensions.final class SheetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2298,8 +2291,8 @@ class RowsPreview(google.protobuf.message.Message): key: builtins.str = ..., value: global___RowsPreview.Sheet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... SHEETS_FIELD_NUMBER: builtins.int @property @@ -2309,11 +2302,11 @@ class RowsPreview(google.protobuf.message.Message): *, sheets: collections.abc.Mapping[builtins.str, global___RowsPreview.Sheet] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["sheets", b"sheets"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["sheets", b"sheets"]) -> None: ... global___RowsPreview = RowsPreview -@typing.final +@typing_extensions.final class FieldID(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -2327,11 +2320,11 @@ class FieldID(google.protobuf.message.Message): field_type: global___FieldType.ValueType = ..., field: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "field_type", b"field_type"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "field_type", b"field_type"]) -> None: ... 
global___FieldID = FieldID -@typing.final +@typing_extensions.final class AllFieldIDs(google.protobuf.message.Message): """This message is used to store a list of all field ids of a particular resource. Note that title and summary fields are not included. @@ -2347,6 +2340,6 @@ class AllFieldIDs(google.protobuf.message.Message): *, fields: collections.abc.Iterable[global___FieldID] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["fields", b"fields"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields"]) -> None: ... global___AllFieldIDs = AllFieldIDs diff --git a/nucliadb_protos/python/nucliadb_protos/standalone_pb2.py b/nucliadb_protos/python/nucliadb_protos/standalone_pb2.py index 8116ca6326..48aac50533 100644 --- a/nucliadb_protos/python/nucliadb_protos/standalone_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/standalone_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/standalone.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/standalone_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/standalone_pb2.pyi index 01afc7c1ff..c2e82a42cc 100644 --- a/nucliadb_protos/python/nucliadb_protos/standalone_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/standalone_pb2.pyi @@ -2,15 +2,19 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import google.protobuf.descriptor import google.protobuf.message -import typing +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class NodeActionRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -27,11 +31,11 @@ class NodeActionRequest(google.protobuf.message.Message): action: builtins.str = ..., payload: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "payload", b"payload", "service", b"service"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["action", b"action", "payload", b"payload", "service", b"service"]) -> None: ... global___NodeActionRequest = NodeActionRequest -@typing.final +@typing_extensions.final class NodeActionResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -42,11 +46,11 @@ class NodeActionResponse(google.protobuf.message.Message): *, payload: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["payload", b"payload"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["payload", b"payload"]) -> None: ... 
global___NodeActionResponse = NodeActionResponse -@typing.final +@typing_extensions.final class NodeInfoRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -56,7 +60,7 @@ class NodeInfoRequest(google.protobuf.message.Message): global___NodeInfoRequest = NodeInfoRequest -@typing.final +@typing_extensions.final class NodeInfoResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -79,6 +83,6 @@ class NodeInfoResponse(google.protobuf.message.Message): available_disk: builtins.int = ..., total_disk: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["address", b"address", "available_disk", b"available_disk", "id", b"id", "shard_count", b"shard_count", "total_disk", b"total_disk"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["address", b"address", "available_disk", b"available_disk", "id", b"id", "shard_count", b"shard_count", "total_disk", b"total_disk"]) -> None: ... global___NodeInfoResponse = NodeInfoResponse diff --git a/nucliadb_protos/python/nucliadb_protos/standalone_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/standalone_pb2_grpc.pyi index 5438f1086a..db822104af 100644 --- a/nucliadb_protos/python/nucliadb_protos/standalone_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/standalone_pb2_grpc.pyi @@ -2,57 +2,33 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import abc -import collections.abc import grpc -import grpc.aio import nucliadb_protos.standalone_pb2 -import typing - -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... class StandaloneClusterServiceStub: - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... NodeAction: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.standalone_pb2.NodeActionRequest, nucliadb_protos.standalone_pb2.NodeActionResponse, ] - NodeInfo: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.standalone_pb2.NodeInfoRequest, nucliadb_protos.standalone_pb2.NodeInfoResponse, ] -class StandaloneClusterServiceAsyncStub: - NodeAction: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.standalone_pb2.NodeActionRequest, - nucliadb_protos.standalone_pb2.NodeActionResponse, - ] - - NodeInfo: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.standalone_pb2.NodeInfoRequest, - nucliadb_protos.standalone_pb2.NodeInfoResponse, - ] - class StandaloneClusterServiceServicer(metaclass=abc.ABCMeta): @abc.abstractmethod def NodeAction( self, request: nucliadb_protos.standalone_pb2.NodeActionRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.standalone_pb2.NodeActionResponse, collections.abc.Awaitable[nucliadb_protos.standalone_pb2.NodeActionResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.standalone_pb2.NodeActionResponse: ... @abc.abstractmethod def NodeInfo( self, request: nucliadb_protos.standalone_pb2.NodeInfoRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.standalone_pb2.NodeInfoResponse, collections.abc.Awaitable[nucliadb_protos.standalone_pb2.NodeInfoResponse]]: ... + context: grpc.ServicerContext, + ) -> nucliadb_protos.standalone_pb2.NodeInfoResponse: ... 
-def add_StandaloneClusterServiceServicer_to_server(servicer: StandaloneClusterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... +def add_StandaloneClusterServiceServicer_to_server(servicer: StandaloneClusterServiceServicer, server: grpc.Server) -> None: ... diff --git a/nucliadb_protos/python/nucliadb_protos/train_pb2.py b/nucliadb_protos/python/nucliadb_protos/train_pb2.py index 534e405a4c..21a1e53a74 100644 --- a/nucliadb_protos/python/nucliadb_protos/train_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/train_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/train.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/train_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/train_pb2.pyi index 3810b5090e..87df4a79c8 100644 --- a/nucliadb_protos/python/nucliadb_protos/train_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/train_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -11,7 +10,12 @@ import google.protobuf.message import google.protobuf.timestamp_pb2 import nucliadb_protos.knowledgebox_pb2 import nucliadb_protos.resources_pb2 -import typing +import sys + +if sys.version_info >= (3, 8): + import typing as typing_extensions +else: + import typing_extensions from nucliadb_protos.knowledgebox_pb2 import ( CONFLICT as CONFLICT, DeleteKnowledgeBoxResponse as DeleteKnowledgeBoxResponse, @@ -174,7 +178,7 @@ from nucliadb_protos.writer_pb2 import ( DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -@typing.final +@typing_extensions.final class EnabledMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -194,11 +198,11 @@ class EnabledMetadata(google.protobuf.message.Message): labels: builtins.bool = ..., vector: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "labels", b"labels", "text", b"text", "vector", b"vector"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "labels", b"labels", "text", b"text", "vector", b"vector"]) -> None: ... global___EnabledMetadata = EnabledMetadata -@typing.final +@typing_extensions.final class TrainLabels(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -218,11 +222,11 @@ class TrainLabels(google.protobuf.message.Message): field: collections.abc.Iterable[nucliadb_protos.resources_pb2.Classification] | None = ..., paragraph: collections.abc.Iterable[nucliadb_protos.resources_pb2.Classification] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "paragraph", b"paragraph", "resource", b"resource"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "paragraph", b"paragraph", "resource", b"resource"]) -> None: ... 
global___TrainLabels = TrainLabels -@typing.final +@typing_extensions.final class Position(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -236,11 +240,11 @@ class Position(google.protobuf.message.Message): start: builtins.int = ..., end: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "start", b"start"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "start", b"start"]) -> None: ... global___Position = Position -@typing.final +@typing_extensions.final class EntityPositions(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -255,15 +259,15 @@ class EntityPositions(google.protobuf.message.Message): entity: builtins.str = ..., positions: collections.abc.Iterable[global___Position] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entity", b"entity", "positions", b"positions"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity", b"entity", "positions", b"positions"]) -> None: ... global___EntityPositions = EntityPositions -@typing.final +@typing_extensions.final class TrainMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntitiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -277,9 +281,9 @@ class TrainMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class EntityPositionsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -294,8 +298,8 @@ class TrainMetadata(google.protobuf.message.Message): key: builtins.str = ..., value: global___EntityPositions | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... TEXT_FIELD_NUMBER: builtins.int ENTITIES_FIELD_NUMBER: builtins.int @@ -320,12 +324,12 @@ class TrainMetadata(google.protobuf.message.Message): labels: global___TrainLabels | None = ..., vector: collections.abc.Iterable[builtins.float] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["labels", b"labels"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "entity_positions", b"entity_positions", "labels", b"labels", "text", b"text", "vector", b"vector"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["labels", b"labels"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "entity_positions", b"entity_positions", "labels", b"labels", "text", b"text", "vector", b"vector"]) -> None: ... 
global___TrainMetadata = TrainMetadata -@typing.final +@typing_extensions.final class GetInfoRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -337,12 +341,12 @@ class GetInfoRequest(google.protobuf.message.Message): *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> None: ... global___GetInfoRequest = GetInfoRequest -@typing.final +@typing_extensions.final class GetLabelsetsCountRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -362,12 +366,12 @@ class GetLabelsetsCountRequest(google.protobuf.message.Message): paragraph_labelsets: collections.abc.Iterable[builtins.str] | None = ..., resource_labelsets: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "paragraph_labelsets", b"paragraph_labelsets", "resource_labelsets", b"resource_labelsets"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "paragraph_labelsets", b"paragraph_labelsets", "resource_labelsets", b"resource_labelsets"]) -> None: ... global___GetLabelsetsCountRequest = GetLabelsetsCountRequest -@typing.final +@typing_extensions.final class GetResourcesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -375,12 +379,12 @@ class GetResourcesRequest(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int RANDOM_FIELD_NUMBER: builtins.int - size: builtins.int - random: builtins.bool @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def metadata(self) -> global___EnabledMetadata: ... + size: builtins.int + random: builtins.bool def __init__( self, *, @@ -389,12 +393,12 @@ class GetResourcesRequest(google.protobuf.message.Message): size: builtins.int = ..., random: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size"]) -> None: ... global___GetResourcesRequest = GetResourcesRequest -@typing.final +@typing_extensions.final class GetParagraphsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -404,15 +408,15 @@ class GetParagraphsRequest(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int RANDOM_FIELD_NUMBER: builtins.int - uuid: builtins.str - size: builtins.int - random: builtins.bool @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... 
+ uuid: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... @property def metadata(self) -> global___EnabledMetadata: ... + size: builtins.int + random: builtins.bool def __init__( self, *, @@ -423,12 +427,12 @@ class GetParagraphsRequest(google.protobuf.message.Message): size: builtins.int = ..., random: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... global___GetParagraphsRequest = GetParagraphsRequest -@typing.final +@typing_extensions.final class GetSentencesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -438,15 +442,15 @@ class GetSentencesRequest(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int RANDOM_FIELD_NUMBER: builtins.int - uuid: builtins.str - size: builtins.int - random: builtins.bool @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + uuid: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... @property def metadata(self) -> global___EnabledMetadata: ... + size: builtins.int + random: builtins.bool def __init__( self, *, @@ -457,12 +461,12 @@ class GetSentencesRequest(google.protobuf.message.Message): size: builtins.int = ..., random: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... global___GetSentencesRequest = GetSentencesRequest -@typing.final +@typing_extensions.final class GetFieldsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -472,15 +476,15 @@ class GetFieldsRequest(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int SIZE_FIELD_NUMBER: builtins.int RANDOM_FIELD_NUMBER: builtins.int - uuid: builtins.str - size: builtins.int - random: builtins.bool @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + uuid: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... @property def metadata(self) -> global___EnabledMetadata: ... + size: builtins.int + random: builtins.bool def __init__( self, *, @@ -491,12 +495,12 @@ class GetFieldsRequest(google.protobuf.message.Message): size: builtins.int = ..., random: builtins.bool = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "kb", b"kb", "metadata", b"metadata", "random", b"random", "size", b"size", "uuid", b"uuid"]) -> None: ... global___GetFieldsRequest = GetFieldsRequest -@typing.final +@typing_extensions.final class TrainInfo(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -516,11 +520,11 @@ class TrainInfo(google.protobuf.message.Message): paragraphs: builtins.int = ..., sentences: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["fields", b"fields", "paragraphs", b"paragraphs", "resources", b"resources", "sentences", b"sentences"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields", "paragraphs", b"paragraphs", "resources", b"resources", "sentences", b"sentences"]) -> None: ... global___TrainInfo = TrainInfo -@typing.final +@typing_extensions.final class TrainSentence(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -530,10 +534,10 @@ class TrainSentence(google.protobuf.message.Message): SENTENCE_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int uuid: builtins.str - paragraph: builtins.str - sentence: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... + paragraph: builtins.str + sentence: builtins.str @property def metadata(self) -> global___TrainMetadata: ... def __init__( @@ -545,12 +549,12 @@ class TrainSentence(google.protobuf.message.Message): sentence: builtins.str = ..., metadata: global___TrainMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata", "paragraph", b"paragraph", "sentence", b"sentence", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata", "paragraph", b"paragraph", "sentence", b"sentence", "uuid", b"uuid"]) -> None: ... global___TrainSentence = TrainSentence -@typing.final +@typing_extensions.final class TrainParagraph(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -559,9 +563,9 @@ class TrainParagraph(google.protobuf.message.Message): PARAGRAPH_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int uuid: builtins.str - paragraph: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... + paragraph: builtins.str @property def metadata(self) -> global___TrainMetadata: ... def __init__( @@ -572,12 +576,12 @@ class TrainParagraph(google.protobuf.message.Message): paragraph: builtins.str = ..., metadata: global___TrainMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata", "paragraph", b"paragraph", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata", "paragraph", b"paragraph", "uuid", b"uuid"]) -> None: ... global___TrainParagraph = TrainParagraph -@typing.final +@typing_extensions.final class TrainField(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -586,9 +590,9 @@ class TrainField(google.protobuf.message.Message): SUBFIELD_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int uuid: builtins.str - subfield: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... + subfield: builtins.str @property def metadata(self) -> global___TrainMetadata: ... def __init__( @@ -599,12 +603,12 @@ class TrainField(google.protobuf.message.Message): subfield: builtins.str = ..., metadata: global___TrainMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "metadata", b"metadata", "subfield", b"subfield", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "metadata", b"metadata", "subfield", b"subfield", "uuid", b"uuid"]) -> None: ... global___TrainField = TrainField -@typing.final +@typing_extensions.final class TrainResource(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -636,16 +640,16 @@ class TrainResource(google.protobuf.message.Message): modified: google.protobuf.timestamp_pb2.Timestamp | None = ..., metadata: global___TrainMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["created", b"created", "metadata", b"metadata", "modified", b"modified"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["created", b"created", "icon", b"icon", "metadata", b"metadata", "modified", b"modified", "slug", b"slug", "title", b"title", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["created", b"created", "metadata", b"metadata", "modified", b"modified"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["created", b"created", "icon", b"icon", "metadata", b"metadata", "modified", b"modified", "slug", b"slug", "title", b"title", "uuid", b"uuid"]) -> None: ... global___TrainResource = TrainResource -@typing.final +@typing_extensions.final class LabelsetCount(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ParagraphsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -659,9 +663,9 @@ class LabelsetCount(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
- @typing.final + @typing_extensions.final class ResourcesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -675,7 +679,7 @@ class LabelsetCount(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... PARAGRAPHS_FIELD_NUMBER: builtins.int RESOURCES_FIELD_NUMBER: builtins.int @@ -689,15 +693,15 @@ class LabelsetCount(google.protobuf.message.Message): paragraphs: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., resources: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["paragraphs", b"paragraphs", "resources", b"resources"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["paragraphs", b"paragraphs", "resources", b"resources"]) -> None: ... global___LabelsetCount = LabelsetCount -@typing.final +@typing_extensions.final class LabelsetsCount(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class LabelsetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -712,8 +716,8 @@ class LabelsetsCount(google.protobuf.message.Message): key: builtins.str = ..., value: global___LabelsetCount | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... LABELSETS_FIELD_NUMBER: builtins.int @property @@ -723,6 +727,6 @@ class LabelsetsCount(google.protobuf.message.Message): *, labelsets: collections.abc.Mapping[builtins.str, global___LabelsetCount] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["labelsets", b"labelsets"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["labelsets", b"labelsets"]) -> None: ... global___LabelsetsCount = LabelsetsCount diff --git a/nucliadb_protos/python/nucliadb_protos/train_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/train_pb2_grpc.pyi index b4c132b699..18e9b1efed 100644 --- a/nucliadb_protos/python/nucliadb_protos/train_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/train_pb2_grpc.pyi @@ -2,14 +2,11 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ - import abc import collections.abc import grpc -import grpc.aio import nucliadb_protos.train_pb2 import nucliadb_protos.writer_pb2 -import typing from nucliadb_protos.knowledgebox_pb2 import ( CONFLICT as CONFLICT, DeleteKnowledgeBoxResponse as DeleteKnowledgeBoxResponse, @@ -170,151 +167,89 @@ from nucliadb_protos.writer_pb2 import ( WriterStatusResponse as WriterStatusResponse, ) -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... 
- class TrainStub: - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... GetInfo: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.train_pb2.GetInfoRequest, nucliadb_protos.train_pb2.TrainInfo, ] - GetSentences: grpc.UnaryStreamMultiCallable[ nucliadb_protos.train_pb2.GetSentencesRequest, nucliadb_protos.train_pb2.TrainSentence, ] - GetParagraphs: grpc.UnaryStreamMultiCallable[ nucliadb_protos.train_pb2.GetParagraphsRequest, nucliadb_protos.train_pb2.TrainParagraph, ] - GetFields: grpc.UnaryStreamMultiCallable[ nucliadb_protos.train_pb2.GetFieldsRequest, nucliadb_protos.train_pb2.TrainField, ] - GetResources: grpc.UnaryStreamMultiCallable[ nucliadb_protos.train_pb2.GetResourcesRequest, nucliadb_protos.train_pb2.TrainResource, ] - GetOntology: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetLabelsRequest, nucliadb_protos.writer_pb2.GetLabelsResponse, ] - GetEntities: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetEntitiesRequest, nucliadb_protos.writer_pb2.GetEntitiesResponse, ] - GetOntologyCount: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.train_pb2.GetLabelsetsCountRequest, nucliadb_protos.train_pb2.LabelsetsCount, ] -class TrainAsyncStub: - GetInfo: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.train_pb2.GetInfoRequest, - nucliadb_protos.train_pb2.TrainInfo, - ] - - GetSentences: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.train_pb2.GetSentencesRequest, - nucliadb_protos.train_pb2.TrainSentence, - ] - - GetParagraphs: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.train_pb2.GetParagraphsRequest, - nucliadb_protos.train_pb2.TrainParagraph, - ] - - GetFields: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.train_pb2.GetFieldsRequest, - nucliadb_protos.train_pb2.TrainField, - ] - - GetResources: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.train_pb2.GetResourcesRequest, - nucliadb_protos.train_pb2.TrainResource, - ] - - GetOntology: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetLabelsRequest, - nucliadb_protos.writer_pb2.GetLabelsResponse, - ] - - GetEntities: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetEntitiesRequest, - nucliadb_protos.writer_pb2.GetEntitiesResponse, - ] - - GetOntologyCount: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.train_pb2.GetLabelsetsCountRequest, - nucliadb_protos.train_pb2.LabelsetsCount, - ] - class TrainServicer(metaclass=abc.ABCMeta): @abc.abstractmethod def GetInfo( self, request: nucliadb_protos.train_pb2.GetInfoRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.train_pb2.TrainInfo, collections.abc.Awaitable[nucliadb_protos.train_pb2.TrainInfo]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.train_pb2.TrainInfo: ... @abc.abstractmethod def GetSentences( self, request: nucliadb_protos.train_pb2.GetSentencesRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.train_pb2.TrainSentence], collections.abc.AsyncIterator[nucliadb_protos.train_pb2.TrainSentence]]: ... - + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.train_pb2.TrainSentence]: ... @abc.abstractmethod def GetParagraphs( self, request: nucliadb_protos.train_pb2.GetParagraphsRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.train_pb2.TrainParagraph], collections.abc.AsyncIterator[nucliadb_protos.train_pb2.TrainParagraph]]: ... 
- + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.train_pb2.TrainParagraph]: ... @abc.abstractmethod def GetFields( self, request: nucliadb_protos.train_pb2.GetFieldsRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.train_pb2.TrainField], collections.abc.AsyncIterator[nucliadb_protos.train_pb2.TrainField]]: ... - + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.train_pb2.TrainField]: ... @abc.abstractmethod def GetResources( self, request: nucliadb_protos.train_pb2.GetResourcesRequest, - context: _ServicerContext, - ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.train_pb2.TrainResource], collections.abc.AsyncIterator[nucliadb_protos.train_pb2.TrainResource]]: ... - + context: grpc.ServicerContext, + ) -> collections.abc.Iterator[nucliadb_protos.train_pb2.TrainResource]: ... @abc.abstractmethod def GetOntology( self, request: nucliadb_protos.writer_pb2.GetLabelsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetLabelsResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetLabelsResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetLabelsResponse: ... @abc.abstractmethod def GetEntities( self, request: nucliadb_protos.writer_pb2.GetEntitiesRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetEntitiesResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetEntitiesResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetEntitiesResponse: ... @abc.abstractmethod def GetOntologyCount( self, request: nucliadb_protos.train_pb2.GetLabelsetsCountRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.train_pb2.LabelsetsCount, collections.abc.Awaitable[nucliadb_protos.train_pb2.LabelsetsCount]]: ... + context: grpc.ServicerContext, + ) -> nucliadb_protos.train_pb2.LabelsetsCount: ... -def add_TrainServicer_to_server(servicer: TrainServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... +def add_TrainServicer_to_server(servicer: TrainServicer, server: grpc.Server) -> None: ... diff --git a/nucliadb_protos/python/nucliadb_protos/utils_pb2.py b/nucliadb_protos/python/nucliadb_protos/utils_pb2.py index 3a7856f7c4..7fc2112180 100644 --- a/nucliadb_protos/python/nucliadb_protos/utils_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/utils_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/utils.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/utils_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/utils_pb2.pyi index 23372dbea5..c1ba45b153 100644 --- a/nucliadb_protos/python/nucliadb_protos/utils_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/utils_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -49,7 +48,7 @@ STABLE: ReleaseChannel.ValueType # 0 EXPERIMENTAL: ReleaseChannel.ValueType # 1 global___ReleaseChannel = ReleaseChannel -@typing.final +@typing_extensions.final class Relation(google.protobuf.message.Message): """Relations are connexions between nodes in the relation index. They are tuplets (Source, Relation Type, Relation Label, To). @@ -61,7 +60,7 @@ class Relation(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _RelationTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Relation._RelationType.ValueType], builtins.type): + class _RelationTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Relation._RelationType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor CHILD: Relation._RelationType.ValueType # 0 """Child resource""" @@ -95,12 +94,12 @@ class Relation(google.protobuf.message.Message): RELATION_FIELD_NUMBER: builtins.int RELATION_LABEL_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int - relation: global___Relation.RelationType.ValueType - relation_label: builtins.str @property def source(self) -> global___RelationNode: ... @property def to(self) -> global___RelationNode: ... + relation: global___Relation.RelationType.ValueType + relation_label: builtins.str @property def metadata(self) -> global___RelationMetadata: ... def __init__( @@ -112,12 +111,12 @@ class Relation(google.protobuf.message.Message): relation_label: builtins.str = ..., metadata: global___RelationMetadata | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata", "source", b"source", "to", b"to"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["metadata", b"metadata", "relation", b"relation", "relation_label", b"relation_label", "source", b"source", "to", b"to"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "source", b"source", "to", b"to"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "relation", b"relation", "relation_label", b"relation_label", "source", b"source", "to", b"to"]) -> None: ... global___Relation = Relation -@typing.final +@typing_extensions.final class RelationMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -140,22 +139,22 @@ class RelationMetadata(google.protobuf.message.Message): to_start: builtins.int | None = ..., to_end: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_paragraph_id", b"_paragraph_id", "_source_end", b"_source_end", "_source_start", b"_source_start", "_to_end", b"_to_end", "_to_start", b"_to_start", "paragraph_id", b"paragraph_id", "source_end", b"source_end", "source_start", b"source_start", "to_end", b"to_end", "to_start", b"to_start"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_paragraph_id", b"_paragraph_id", "_source_end", b"_source_end", "_source_start", b"_source_start", "_to_end", b"_to_end", "_to_start", b"_to_start", "paragraph_id", b"paragraph_id", "source_end", b"source_end", "source_start", b"source_start", "to_end", b"to_end", "to_start", b"to_start"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["_paragraph_id", b"_paragraph_id", "_source_end", b"_source_end", "_source_start", b"_source_start", "_to_end", b"_to_end", "_to_start", b"_to_start", "paragraph_id", b"paragraph_id", "source_end", b"source_end", "source_start", b"source_start", "to_end", b"to_end", "to_start", b"to_start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["_paragraph_id", b"_paragraph_id", "_source_end", b"_source_end", "_source_start", b"_source_start", "_to_end", b"_to_end", "_to_start", b"_to_start", "paragraph_id", b"paragraph_id", "source_end", b"source_end", "source_start", b"source_start", "to_end", b"to_end", "to_start", b"to_start"]) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_paragraph_id", b"_paragraph_id"]) -> typing.Literal["paragraph_id"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_paragraph_id", b"_paragraph_id"]) -> typing_extensions.Literal["paragraph_id"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_source_end", b"_source_end"]) -> typing.Literal["source_end"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_source_end", b"_source_end"]) -> typing_extensions.Literal["source_end"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_source_start", b"_source_start"]) -> typing.Literal["source_start"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_source_start", b"_source_start"]) -> typing_extensions.Literal["source_start"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_to_end", b"_to_end"]) -> typing.Literal["to_end"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_to_end", b"_to_end"]) -> typing_extensions.Literal["to_end"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_to_start", b"_to_start"]) -> typing.Literal["to_start"] | None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["_to_start", b"_to_start"]) -> typing_extensions.Literal["to_start"] | None: ... global___RelationMetadata = RelationMetadata -@typing.final +@typing_extensions.final class RelationNode(google.protobuf.message.Message): """Nodes are tuplets (Value, Type, Subtype) and they are the main element in the relation index.""" @@ -165,7 +164,7 @@ class RelationNode(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _NodeTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RelationNode._NodeType.ValueType], builtins.type): + class _NodeTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RelationNode._NodeType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor ENTITY: RelationNode._NodeType.ValueType # 0 LABEL: RelationNode._NodeType.ValueType # 1 @@ -194,15 +193,15 @@ class RelationNode(google.protobuf.message.Message): ntype: global___RelationNode.NodeType.ValueType = ..., subtype: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["ntype", b"ntype", "subtype", b"subtype", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["ntype", b"ntype", "subtype", b"subtype", "value", b"value"]) -> None: ... 
global___RelationNode = RelationNode -@typing.final +@typing_extensions.final class ExtractedText(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SplitTextEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -216,7 +215,7 @@ class ExtractedText(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... TEXT_FIELD_NUMBER: builtins.int SPLIT_TEXT_FIELD_NUMBER: builtins.int @@ -233,11 +232,11 @@ class ExtractedText(google.protobuf.message.Message): split_text: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., deleted_splits: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["deleted_splits", b"deleted_splits", "split_text", b"split_text", "text", b"text"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["deleted_splits", b"deleted_splits", "split_text", b"split_text", "text", b"text"]) -> None: ... global___ExtractedText = ExtractedText -@typing.final +@typing_extensions.final class Vector(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -261,11 +260,11 @@ class Vector(google.protobuf.message.Message): end_paragraph: builtins.int = ..., vector: collections.abc.Iterable[builtins.float] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "end_paragraph", b"end_paragraph", "start", b"start", "start_paragraph", b"start_paragraph", "vector", b"vector"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "end_paragraph", b"end_paragraph", "start", b"start", "start_paragraph", b"start_paragraph", "vector", b"vector"]) -> None: ... global___Vector = Vector -@typing.final +@typing_extensions.final class Vectors(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -277,15 +276,15 @@ class Vectors(google.protobuf.message.Message): *, vectors: collections.abc.Iterable[global___Vector] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["vectors", b"vectors"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["vectors", b"vectors"]) -> None: ... global___Vectors = Vectors -@typing.final +@typing_extensions.final class VectorObject(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class SplitVectorsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -300,8 +299,8 @@ class VectorObject(google.protobuf.message.Message): key: builtins.str = ..., value: global___Vectors | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
VECTORS_FIELD_NUMBER: builtins.int SPLIT_VECTORS_FIELD_NUMBER: builtins.int @@ -319,12 +318,12 @@ class VectorObject(google.protobuf.message.Message): split_vectors: collections.abc.Mapping[builtins.str, global___Vectors] | None = ..., deleted_splits: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["vectors", b"vectors"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["deleted_splits", b"deleted_splits", "split_vectors", b"split_vectors", "vectors", b"vectors"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["vectors", b"vectors"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deleted_splits", b"deleted_splits", "split_vectors", b"split_vectors", "vectors", b"vectors"]) -> None: ... global___VectorObject = VectorObject -@typing.final +@typing_extensions.final class UserVector(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -332,12 +331,12 @@ class UserVector(google.protobuf.message.Message): LABELS_FIELD_NUMBER: builtins.int START_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int - start: builtins.int - end: builtins.int @property def vector(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... @property def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + start: builtins.int + end: builtins.int def __init__( self, *, @@ -346,15 +345,15 @@ class UserVector(google.protobuf.message.Message): start: builtins.int = ..., end: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["end", b"end", "labels", b"labels", "start", b"start", "vector", b"vector"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "labels", b"labels", "start", b"start", "vector", b"vector"]) -> None: ... global___UserVector = UserVector -@typing.final +@typing_extensions.final class UserVectors(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class VectorsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -369,28 +368,27 @@ class UserVectors(google.protobuf.message.Message): key: builtins.str = ..., value: global___UserVector | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... VECTORS_FIELD_NUMBER: builtins.int @property def vectors(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___UserVector]: """vector's id""" - def __init__( self, *, vectors: collections.abc.Mapping[builtins.str, global___UserVector] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["vectors", b"vectors"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["vectors", b"vectors"]) -> None: ... 
global___UserVectors = UserVectors -@typing.final +@typing_extensions.final class UserVectorSet(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class VectorsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -405,24 +403,23 @@ class UserVectorSet(google.protobuf.message.Message): key: builtins.str = ..., value: global___UserVectors | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... VECTORS_FIELD_NUMBER: builtins.int @property def vectors(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___UserVectors]: """vectorsets""" - def __init__( self, *, vectors: collections.abc.Mapping[builtins.str, global___UserVectors] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["vectors", b"vectors"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["vectors", b"vectors"]) -> None: ... global___UserVectorSet = UserVectorSet -@typing.final +@typing_extensions.final class UserVectorsList(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -434,11 +431,11 @@ class UserVectorsList(google.protobuf.message.Message): *, vectors: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["vectors", b"vectors"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["vectors", b"vectors"]) -> None: ... global___UserVectorsList = UserVectorsList -@typing.final +@typing_extensions.final class Security(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -450,6 +447,6 @@ class Security(google.protobuf.message.Message): *, access_groups: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["access_groups", b"access_groups"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["access_groups", b"access_groups"]) -> None: ... global___Security = Security diff --git a/nucliadb_protos/python/nucliadb_protos/writer_pb2.py b/nucliadb_protos/python/nucliadb_protos/writer_pb2.py index 14bdd28ad7..848fb94ba8 100644 --- a/nucliadb_protos/python/nucliadb_protos/writer_pb2.py +++ b/nucliadb_protos/python/nucliadb_protos/writer_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: nucliadb_protos/writer.proto -# Protobuf Python Version: 4.25.1 +# Protobuf Python Version: 4.25.0 """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool diff --git a/nucliadb_protos/python/nucliadb_protos/writer_pb2.pyi b/nucliadb_protos/python/nucliadb_protos/writer_pb2.pyi index b238c60fe0..5cd45b0105 100644 --- a/nucliadb_protos/python/nucliadb_protos/writer_pb2.pyi +++ b/nucliadb_protos/python/nucliadb_protos/writer_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import builtins import collections.abc import google.protobuf.descriptor @@ -73,6 +72,7 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) from nucliadb_protos.resources_pb2 import ( AllFieldIDs as AllFieldIDs, @@ -168,7 +168,7 @@ WRITER: NotificationSource.ValueType # 1 PROCESSOR: NotificationSource.ValueType # 2 global___NotificationSource = NotificationSource -@typing.final +@typing_extensions.final class Audit(google.protobuf.message.Message): """We receive this information throw an stream system""" @@ -178,7 +178,7 @@ class Audit(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Audit._Source.ValueType], builtins.type): + class _SourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Audit._Source.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor HTTP: Audit._Source.ValueType # 0 DASHBOARD: Audit._Source.ValueType # 1 @@ -194,10 +194,10 @@ class Audit(google.protobuf.message.Message): ORIGIN_FIELD_NUMBER: builtins.int SOURCE_FIELD_NUMBER: builtins.int user: builtins.str - origin: builtins.str - source: global___Audit.Source.ValueType @property def when(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + origin: builtins.str + source: global___Audit.Source.ValueType def __init__( self, *, @@ -206,12 +206,12 @@ class Audit(google.protobuf.message.Message): origin: builtins.str = ..., source: global___Audit.Source.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["when", b"when"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["origin", b"origin", "source", b"source", "user", b"user", "when", b"when"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["when", b"when"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["origin", b"origin", "source", b"source", "user", b"user", "when", b"when"]) -> None: ... global___Audit = Audit -@typing.final +@typing_extensions.final class Error(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -219,7 +219,7 @@ class Error(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ErrorCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Error._ErrorCode.ValueType], builtins.type): + class _ErrorCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Error._ErrorCode.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor GENERIC: Error._ErrorCode.ValueType # 0 EXTRACT: Error._ErrorCode.ValueType # 1 @@ -246,11 +246,11 @@ class Error(google.protobuf.message.Message): error: builtins.str = ..., code: global___Error.ErrorCode.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["code", b"code", "error", b"error", "field", b"field", "field_type", b"field_type"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["code", b"code", "error", b"error", "field", b"field", "field_type", b"field_type"]) -> None: ... 
global___Error = Error -@typing.final +@typing_extensions.final class BrokerMessage(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -258,7 +258,7 @@ class BrokerMessage(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MessageTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BrokerMessage._MessageType.ValueType], builtins.type): + class _MessageTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BrokerMessage._MessageType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor AUTOCOMMIT: BrokerMessage._MessageType.ValueType # 0 MULTI: BrokerMessage._MessageType.ValueType # 1 @@ -277,7 +277,7 @@ class BrokerMessage(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _MessageSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BrokerMessage._MessageSource.ValueType], builtins.type): + class _MessageSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BrokerMessage._MessageSource.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor WRITER: BrokerMessage._MessageSource.ValueType # 0 PROCESSOR: BrokerMessage._MessageSource.ValueType # 1 @@ -286,7 +286,7 @@ class BrokerMessage(google.protobuf.message.Message): WRITER: BrokerMessage.MessageSource.ValueType # 0 PROCESSOR: BrokerMessage.MessageSource.ValueType # 1 - @typing.final + @typing_extensions.final class ConversationsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -301,10 +301,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.Conversation | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class LayoutsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -319,10 +319,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldLayout | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class TextsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -337,10 +337,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldText | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class KeywordsetsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -355,10 +355,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldKeywordset | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class DatetimesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -373,10 +373,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldDatetime | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class LinksEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -391,10 +391,10 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldLink | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class FilesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -409,8 +409,8 @@ class BrokerMessage(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.resources_pb2.FieldFile | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
KBID_FIELD_NUMBER: builtins.int UUID_FIELD_NUMBER: builtins.int @@ -452,20 +452,10 @@ class BrokerMessage(google.protobuf.message.Message): kbid: builtins.str uuid: builtins.str slug: builtins.str - type: global___BrokerMessage.MessageType.ValueType - multiid: builtins.str - origin_seq: builtins.int - slow_processing_time: builtins.float - pre_processing_time: builtins.float - txseqid: builtins.int - """Not needed anymore""" - processing_id: builtins.str - source: global___BrokerMessage.MessageSource.ValueType - account_seq: builtins.int - reindex: builtins.bool - """If true, force reindex all paragraphs in a resource""" @property def audit(self) -> global___Audit: ... + type: global___BrokerMessage.MessageType.ValueType + multiid: builtins.str @property def basic(self) -> nucliadb_protos.resources_pb2.Basic: ... @property @@ -475,43 +465,33 @@ class BrokerMessage(google.protobuf.message.Message): @property def conversations(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.Conversation]: """Field Conversations""" - @property def layouts(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldLayout]: """Field Layout""" - @property def texts(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldText]: """Field Text""" - @property def keywordsets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldKeywordset]: """Field keyword""" - @property def datetimes(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldDatetime]: """Field Datetime""" - @property def links(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldLink]: """Field Links""" - @property def files(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.resources_pb2.FieldFile]: """Field File""" - @property def link_extracted_data(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.LinkExtractedData]: """Link extracted extra info""" - @property def file_extracted_data(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.FileExtractedData]: """File extracted extra info""" - @property def extracted_text(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.ExtractedTextWrapper]: """Field Extracted/Computed information""" - @property def field_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.FieldComputedMetadataWrapper]: ... @property @@ -519,15 +499,24 @@ class BrokerMessage(google.protobuf.message.Message): @property def field_large_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.LargeComputedMetadataWrapper]: """Resource Large Computed Metadata""" - @property def delete_fields(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.FieldID]: ... + origin_seq: builtins.int + slow_processing_time: builtins.float + pre_processing_time: builtins.float @property def done_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... 
+ txseqid: builtins.int + """Not needed anymore""" @property def errors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Error]: ... + processing_id: builtins.str + source: global___BrokerMessage.MessageSource.ValueType + account_seq: builtins.int @property def user_vectors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[nucliadb_protos.resources_pb2.UserVectorsWrapper]: ... + reindex: builtins.bool + """If true, force reindex all paragraphs in a resource""" @property def extra(self) -> nucliadb_protos.resources_pb2.Extra: ... @property @@ -575,12 +564,12 @@ class BrokerMessage(google.protobuf.message.Message): question_answers: collections.abc.Iterable[nucliadb_protos.resources_pb2.FieldQuestionAnswerWrapper] | None = ..., security: nucliadb_protos.utils_pb2.Security | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["audit", b"audit", "basic", b"basic", "done_time", b"done_time", "extra", b"extra", "origin", b"origin", "security", b"security"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["account_seq", b"account_seq", "audit", b"audit", "basic", b"basic", "conversations", b"conversations", "datetimes", b"datetimes", "delete_fields", b"delete_fields", "done_time", b"done_time", "errors", b"errors", "extra", b"extra", "extracted_text", b"extracted_text", "field_large_metadata", b"field_large_metadata", "field_metadata", b"field_metadata", "field_vectors", b"field_vectors", "file_extracted_data", b"file_extracted_data", "files", b"files", "kbid", b"kbid", "keywordsets", b"keywordsets", "layouts", b"layouts", "link_extracted_data", b"link_extracted_data", "links", b"links", "multiid", b"multiid", "origin", b"origin", "origin_seq", b"origin_seq", "pre_processing_time", b"pre_processing_time", "processing_id", b"processing_id", "question_answers", b"question_answers", "reindex", b"reindex", "relations", b"relations", "security", b"security", "slow_processing_time", b"slow_processing_time", "slug", b"slug", "source", b"source", "texts", b"texts", "txseqid", b"txseqid", "type", b"type", "user_vectors", b"user_vectors", "uuid", b"uuid"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["audit", b"audit", "basic", b"basic", "done_time", b"done_time", "extra", b"extra", "origin", b"origin", "security", b"security"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["account_seq", b"account_seq", "audit", b"audit", "basic", b"basic", "conversations", b"conversations", "datetimes", b"datetimes", "delete_fields", b"delete_fields", "done_time", b"done_time", "errors", b"errors", "extra", b"extra", "extracted_text", b"extracted_text", "field_large_metadata", b"field_large_metadata", "field_metadata", b"field_metadata", "field_vectors", b"field_vectors", "file_extracted_data", b"file_extracted_data", "files", b"files", "kbid", b"kbid", "keywordsets", b"keywordsets", "layouts", b"layouts", "link_extracted_data", b"link_extracted_data", "links", b"links", "multiid", b"multiid", "origin", b"origin", "origin_seq", b"origin_seq", "pre_processing_time", b"pre_processing_time", "processing_id", b"processing_id", "question_answers", b"question_answers", "reindex", b"reindex", "relations", b"relations", "security", b"security", "slow_processing_time", b"slow_processing_time", "slug", b"slug", "source", b"source", "texts", b"texts", "txseqid", b"txseqid", "type", b"type", "user_vectors", b"user_vectors", "uuid", b"uuid"]) -> None: ... 
global___BrokerMessage = BrokerMessage -@typing.final +@typing_extensions.final class BrokerMessageBlobReference(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -597,15 +586,15 @@ class BrokerMessageBlobReference(google.protobuf.message.Message): uuid: builtins.str = ..., storage_key: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "storage_key", b"storage_key", "uuid", b"uuid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "storage_key", b"storage_key", "uuid", b"uuid"]) -> None: ... global___BrokerMessageBlobReference = BrokerMessageBlobReference -@typing.final +@typing_extensions.final class WriterStatusResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class MsgidEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -619,7 +608,7 @@ class WriterStatusResponse(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KNOWLEDGEBOXES_FIELD_NUMBER: builtins.int MSGID_FIELD_NUMBER: builtins.int @@ -628,18 +617,17 @@ class WriterStatusResponse(google.protobuf.message.Message): @property def msgid(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]: """map of last message processed""" - def __init__( self, *, knowledgeboxes: collections.abc.Iterable[builtins.str] | None = ..., msgid: collections.abc.Mapping[builtins.str, builtins.int] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["knowledgeboxes", b"knowledgeboxes", "msgid", b"msgid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["knowledgeboxes", b"knowledgeboxes", "msgid", b"msgid"]) -> None: ... global___WriterStatusResponse = WriterStatusResponse -@typing.final +@typing_extensions.final class WriterStatusRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -649,16 +637,16 @@ class WriterStatusRequest(google.protobuf.message.Message): global___WriterStatusRequest = WriterStatusRequest -@typing.final +@typing_extensions.final class SetLabelsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int ID_FIELD_NUMBER: builtins.int LABELSET_FIELD_NUMBER: builtins.int - id: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + id: builtins.str @property def labelset(self) -> nucliadb_protos.knowledgebox_pb2.LabelSet: ... def __init__( @@ -668,32 +656,32 @@ class SetLabelsRequest(google.protobuf.message.Message): id: builtins.str = ..., labelset: nucliadb_protos.knowledgebox_pb2.LabelSet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "labelset", b"labelset"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "kb", b"kb", "labelset", b"labelset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "labelset", b"labelset"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "kb", b"kb", "labelset", b"labelset"]) -> None: ... 
global___SetLabelsRequest = SetLabelsRequest -@typing.final +@typing_extensions.final class DelLabelsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int ID_FIELD_NUMBER: builtins.int - id: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + id: builtins.str def __init__( self, *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., id: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "kb", b"kb"]) -> None: ... global___DelLabelsRequest = DelLabelsRequest -@typing.final +@typing_extensions.final class GetLabelsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -701,7 +689,7 @@ class GetLabelsResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetLabelsResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetLabelsResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GetLabelsResponse._Status.ValueType # 0 NOTFOUND: GetLabelsResponse._Status.ValueType # 1 @@ -713,11 +701,11 @@ class GetLabelsResponse(google.protobuf.message.Message): KB_FIELD_NUMBER: builtins.int LABELS_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___GetLabelsResponse.Status.ValueType @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def labels(self) -> nucliadb_protos.knowledgebox_pb2.Labels: ... + status: global___GetLabelsResponse.Status.ValueType def __init__( self, *, @@ -725,12 +713,12 @@ class GetLabelsResponse(google.protobuf.message.Message): labels: nucliadb_protos.knowledgebox_pb2.Labels | None = ..., status: global___GetLabelsResponse.Status.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "labels", b"labels"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "labels", b"labels", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "labels", b"labels"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "labels", b"labels", "status", b"status"]) -> None: ... global___GetLabelsResponse = GetLabelsResponse -@typing.final +@typing_extensions.final class GetLabelsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -742,21 +730,21 @@ class GetLabelsRequest(google.protobuf.message.Message): *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> None: ... 
global___GetLabelsRequest = GetLabelsRequest -@typing.final +@typing_extensions.final class NewEntitiesGroupRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int ENTITIES_FIELD_NUMBER: builtins.int - group: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + group: builtins.str @property def entities(self) -> nucliadb_protos.knowledgebox_pb2.EntitiesGroup: ... def __init__( @@ -766,12 +754,12 @@ class NewEntitiesGroupRequest(google.protobuf.message.Message): group: builtins.str = ..., entities: nucliadb_protos.knowledgebox_pb2.EntitiesGroup | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["entities", b"entities", "kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "group", b"group", "kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["entities", b"entities", "kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "group", b"group", "kb", b"kb"]) -> None: ... global___NewEntitiesGroupRequest = NewEntitiesGroupRequest -@typing.final +@typing_extensions.final class NewEntitiesGroupResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -779,7 +767,7 @@ class NewEntitiesGroupResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[NewEntitiesGroupResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[NewEntitiesGroupResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: NewEntitiesGroupResponse._Status.ValueType # 0 ERROR: NewEntitiesGroupResponse._Status.ValueType # 1 @@ -799,20 +787,20 @@ class NewEntitiesGroupResponse(google.protobuf.message.Message): *, status: global___NewEntitiesGroupResponse.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___NewEntitiesGroupResponse = NewEntitiesGroupResponse -@typing.final +@typing_extensions.final class SetEntitiesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int ENTITIES_FIELD_NUMBER: builtins.int - group: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + group: builtins.str @property def entities(self) -> nucliadb_protos.knowledgebox_pb2.EntitiesGroup: ... def __init__( @@ -822,16 +810,16 @@ class SetEntitiesRequest(google.protobuf.message.Message): group: builtins.str = ..., entities: nucliadb_protos.knowledgebox_pb2.EntitiesGroup | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["entities", b"entities", "kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["entities", b"entities", "group", b"group", "kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["entities", b"entities", "kb", b"kb"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["entities", b"entities", "group", b"group", "kb", b"kb"]) -> None: ... global___SetEntitiesRequest = SetEntitiesRequest -@typing.final +@typing_extensions.final class UpdateEntitiesGroupRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class AddEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -846,10 +834,10 @@ class UpdateEntitiesGroupRequest(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.knowledgebox_pb2.Entity | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - @typing.final + @typing_extensions.final class UpdateEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -864,8 +852,8 @@ class UpdateEntitiesGroupRequest(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.knowledgebox_pb2.Entity | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int @@ -874,23 +862,20 @@ class UpdateEntitiesGroupRequest(google.protobuf.message.Message): DELETE_FIELD_NUMBER: builtins.int TITLE_FIELD_NUMBER: builtins.int COLOR_FIELD_NUMBER: builtins.int - group: builtins.str - title: builtins.str - color: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + group: builtins.str @property def add(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.knowledgebox_pb2.Entity]: """entity_id: Entity""" - @property def update(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.knowledgebox_pb2.Entity]: """entity_id: Entity""" - @property def delete(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """entity_id""" - + title: builtins.str + color: builtins.str def __init__( self, *, @@ -902,12 +887,12 @@ class UpdateEntitiesGroupRequest(google.protobuf.message.Message): title: builtins.str = ..., color: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["add", b"add", "color", b"color", "delete", b"delete", "group", b"group", "kb", b"kb", "title", b"title", "update", b"update"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["add", b"add", "color", b"color", "delete", b"delete", "group", b"group", "kb", b"kb", "title", b"title", "update", b"update"]) -> None: ... 
global___UpdateEntitiesGroupRequest = UpdateEntitiesGroupRequest -@typing.final +@typing_extensions.final class UpdateEntitiesGroupResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -915,7 +900,7 @@ class UpdateEntitiesGroupResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[UpdateEntitiesGroupResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[UpdateEntitiesGroupResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: UpdateEntitiesGroupResponse._Status.ValueType # 0 ERROR: UpdateEntitiesGroupResponse._Status.ValueType # 1 @@ -935,11 +920,11 @@ class UpdateEntitiesGroupResponse(google.protobuf.message.Message): *, status: global___UpdateEntitiesGroupResponse.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... global___UpdateEntitiesGroupResponse = UpdateEntitiesGroupResponse -@typing.final +@typing_extensions.final class ListEntitiesGroupsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -951,12 +936,12 @@ class ListEntitiesGroupsRequest(google.protobuf.message.Message): *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> None: ... global___ListEntitiesGroupsRequest = ListEntitiesGroupsRequest -@typing.final +@typing_extensions.final class ListEntitiesGroupsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -964,7 +949,7 @@ class ListEntitiesGroupsResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ListEntitiesGroupsResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ListEntitiesGroupsResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: ListEntitiesGroupsResponse._Status.ValueType # 0 NOTFOUND: ListEntitiesGroupsResponse._Status.ValueType # 1 @@ -975,7 +960,7 @@ class ListEntitiesGroupsResponse(google.protobuf.message.Message): NOTFOUND: ListEntitiesGroupsResponse.Status.ValueType # 1 ERROR: ListEntitiesGroupsResponse.Status.ValueType # 2 - @typing.final + @typing_extensions.final class GroupsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -990,25 +975,25 @@ class ListEntitiesGroupsResponse(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.knowledgebox_pb2.EntitiesGroupSummary | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... GROUPS_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___ListEntitiesGroupsResponse.Status.ValueType @property def groups(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.knowledgebox_pb2.EntitiesGroupSummary]: ... + status: global___ListEntitiesGroupsResponse.Status.ValueType def __init__( self, *, groups: collections.abc.Mapping[builtins.str, nucliadb_protos.knowledgebox_pb2.EntitiesGroupSummary] | None = ..., status: global___ListEntitiesGroupsResponse.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["groups", b"groups", "status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["groups", b"groups", "status", b"status"]) -> None: ... global___ListEntitiesGroupsResponse = ListEntitiesGroupsResponse -@typing.final +@typing_extensions.final class GetEntitiesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1020,12 +1005,12 @@ class GetEntitiesRequest(google.protobuf.message.Message): *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> None: ... global___GetEntitiesRequest = GetEntitiesRequest -@typing.final +@typing_extensions.final class GetEntitiesResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1033,7 +1018,7 @@ class GetEntitiesResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetEntitiesResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetEntitiesResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GetEntitiesResponse._Status.ValueType # 0 NOTFOUND: GetEntitiesResponse._Status.ValueType # 1 @@ -1044,7 +1029,7 @@ class GetEntitiesResponse(google.protobuf.message.Message): NOTFOUND: GetEntitiesResponse.Status.ValueType # 1 ERROR: GetEntitiesResponse.Status.ValueType # 2 - @typing.final + @typing_extensions.final class GroupsEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1059,17 +1044,17 @@ class GetEntitiesResponse(google.protobuf.message.Message): key: builtins.str = ..., value: nucliadb_protos.knowledgebox_pb2.EntitiesGroup | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KB_FIELD_NUMBER: builtins.int GROUPS_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___GetEntitiesResponse.Status.ValueType @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def groups(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, nucliadb_protos.knowledgebox_pb2.EntitiesGroup]: ... + status: global___GetEntitiesResponse.Status.ValueType def __init__( self, *, @@ -1077,36 +1062,36 @@ class GetEntitiesResponse(google.protobuf.message.Message): groups: collections.abc.Mapping[builtins.str, nucliadb_protos.knowledgebox_pb2.EntitiesGroup] | None = ..., status: global___GetEntitiesResponse.Status.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["groups", b"groups", "kb", b"kb", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["groups", b"groups", "kb", b"kb", "status", b"status"]) -> None: ... global___GetEntitiesResponse = GetEntitiesResponse -@typing.final +@typing_extensions.final class DelEntitiesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int - group: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + group: builtins.str def __init__( self, *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., group: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["group", b"group", "kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["group", b"group", "kb", b"kb"]) -> None: ... global___DelEntitiesRequest = DelEntitiesRequest -@typing.final +@typing_extensions.final class MergeEntitiesRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class EntityID(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1120,7 +1105,7 @@ class MergeEntitiesRequest(google.protobuf.message.Message): group: builtins.str = ..., entity: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["entity", b"entity", "group", b"group"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["entity", b"entity", "group", b"group"]) -> None: ... KB_FIELD_NUMBER: builtins.int FROM_FIELD_NUMBER: builtins.int @@ -1135,32 +1120,32 @@ class MergeEntitiesRequest(google.protobuf.message.Message): kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., to: global___MergeEntitiesRequest.EntityID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["from", b"from", "kb", b"kb", "to", b"to"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["from", b"from", "kb", b"kb", "to", b"to"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["from", b"from", "kb", b"kb", "to", b"to"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["from", b"from", "kb", b"kb", "to", b"to"]) -> None: ... global___MergeEntitiesRequest = MergeEntitiesRequest -@typing.final +@typing_extensions.final class GetLabelSetRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int LABELSET_FIELD_NUMBER: builtins.int - labelset: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + labelset: builtins.str def __init__( self, *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., labelset: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "labelset", b"labelset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "labelset", b"labelset"]) -> None: ... global___GetLabelSetRequest = GetLabelSetRequest -@typing.final +@typing_extensions.final class GetLabelSetResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1168,7 +1153,7 @@ class GetLabelSetResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetLabelSetResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetLabelSetResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GetLabelSetResponse._Status.ValueType # 0 NOTFOUND: GetLabelSetResponse._Status.ValueType # 1 @@ -1180,11 +1165,11 @@ class GetLabelSetResponse(google.protobuf.message.Message): KB_FIELD_NUMBER: builtins.int LABELSET_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___GetLabelSetResponse.Status.ValueType @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def labelset(self) -> nucliadb_protos.knowledgebox_pb2.LabelSet: ... + status: global___GetLabelSetResponse.Status.ValueType def __init__( self, *, @@ -1192,32 +1177,32 @@ class GetLabelSetResponse(google.protobuf.message.Message): labelset: nucliadb_protos.knowledgebox_pb2.LabelSet | None = ..., status: global___GetLabelSetResponse.Status.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "labelset", b"labelset"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "labelset", b"labelset", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "labelset", b"labelset"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "labelset", b"labelset", "status", b"status"]) -> None: ... global___GetLabelSetResponse = GetLabelSetResponse -@typing.final +@typing_extensions.final class GetEntitiesGroupRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int - group: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... 
+ group: builtins.str def __init__( self, *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., group: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["group", b"group", "kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["group", b"group", "kb", b"kb"]) -> None: ... global___GetEntitiesGroupRequest = GetEntitiesGroupRequest -@typing.final +@typing_extensions.final class GetEntitiesGroupResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1225,7 +1210,7 @@ class GetEntitiesGroupResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetEntitiesGroupResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetEntitiesGroupResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GetEntitiesGroupResponse._Status.ValueType # 0 KB_NOT_FOUND: GetEntitiesGroupResponse._Status.ValueType # 1 @@ -1241,11 +1226,11 @@ class GetEntitiesGroupResponse(google.protobuf.message.Message): KB_FIELD_NUMBER: builtins.int GROUP_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___GetEntitiesGroupResponse.Status.ValueType @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def group(self) -> nucliadb_protos.knowledgebox_pb2.EntitiesGroup: ... + status: global___GetEntitiesGroupResponse.Status.ValueType def __init__( self, *, @@ -1253,12 +1238,12 @@ class GetEntitiesGroupResponse(google.protobuf.message.Message): group: nucliadb_protos.knowledgebox_pb2.EntitiesGroup | None = ..., status: global___GetEntitiesGroupResponse.Status.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["group", b"group", "kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["group", b"group", "kb", b"kb", "status", b"status"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["group", b"group", "kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["group", b"group", "kb", b"kb", "status", b"status"]) -> None: ... global___GetEntitiesGroupResponse = GetEntitiesGroupResponse -@typing.final +@typing_extensions.final class GetVectorSetsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1270,12 +1255,12 @@ class GetVectorSetsRequest(google.protobuf.message.Message): *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> None: ... 
global___GetVectorSetsRequest = GetVectorSetsRequest -@typing.final +@typing_extensions.final class GetVectorSetsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1283,7 +1268,7 @@ class GetVectorSetsResponse(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetVectorSetsResponse._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GetVectorSetsResponse._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: GetVectorSetsResponse._Status.ValueType # 0 NOTFOUND: GetVectorSetsResponse._Status.ValueType # 1 @@ -1297,11 +1282,11 @@ class GetVectorSetsResponse(google.protobuf.message.Message): KB_FIELD_NUMBER: builtins.int VECTORSETS_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int - status: global___GetVectorSetsResponse.Status.ValueType @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... @property def vectorsets(self) -> nucliadb_protos.knowledgebox_pb2.VectorSets: ... + status: global___GetVectorSetsResponse.Status.ValueType def __init__( self, *, @@ -1309,41 +1294,41 @@ class GetVectorSetsResponse(google.protobuf.message.Message): vectorsets: nucliadb_protos.knowledgebox_pb2.VectorSets | None = ..., status: global___GetVectorSetsResponse.Status.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "vectorsets", b"vectorsets"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "status", b"status", "vectorsets", b"vectorsets"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "vectorsets", b"vectorsets"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "status", b"status", "vectorsets", b"vectorsets"]) -> None: ... global___GetVectorSetsResponse = GetVectorSetsResponse -@typing.final +@typing_extensions.final class DelVectorSetRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int VECTORSET_FIELD_NUMBER: builtins.int - vectorset: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + vectorset: builtins.str def __init__( self, *, kb: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., vectorset: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kb", b"kb", "vectorset", b"vectorset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kb", b"kb", "vectorset", b"vectorset"]) -> None: ... global___DelVectorSetRequest = DelVectorSetRequest -@typing.final +@typing_extensions.final class SetVectorSetRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KB_FIELD_NUMBER: builtins.int ID_FIELD_NUMBER: builtins.int VECTORSET_FIELD_NUMBER: builtins.int - id: builtins.str @property def kb(self) -> nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID: ... + id: builtins.str @property def vectorset(self) -> nucliadb_protos.knowledgebox_pb2.VectorSet: ... 
def __init__( @@ -1353,12 +1338,12 @@ class SetVectorSetRequest(google.protobuf.message.Message): id: builtins.str = ..., vectorset: nucliadb_protos.knowledgebox_pb2.VectorSet | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kb", b"kb", "vectorset", b"vectorset"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "kb", b"kb", "vectorset", b"vectorset"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kb", b"kb", "vectorset", b"vectorset"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["id", b"id", "kb", b"kb", "vectorset", b"vectorset"]) -> None: ... global___SetVectorSetRequest = SetVectorSetRequest -@typing.final +@typing_extensions.final class OpStatusWriter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1366,7 +1351,7 @@ class OpStatusWriter(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpStatusWriter._Status.ValueType], builtins.type): + class _StatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[OpStatusWriter._Status.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor OK: OpStatusWriter._Status.ValueType # 0 ERROR: OpStatusWriter._Status.ValueType # 1 @@ -1384,11 +1369,11 @@ class OpStatusWriter(google.protobuf.message.Message): *, status: global___OpStatusWriter.Status.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["status", b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status"]) -> None: ... 
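The vectorset management messages regenerated above are ordinary protobuf messages; a hedged sketch of filling them, where the vectorset id and the empty VectorSet configuration are illustrative only:

    from nucliadb_protos import knowledgebox_pb2, writer_pb2

    kb = knowledgebox_pb2.KnowledgeBoxID()  # target KB, fields omitted in this sketch
    set_request = writer_pb2.SetVectorSetRequest(kb=kb, id="my-vectorset")
    set_request.vectorset.CopyFrom(knowledgebox_pb2.VectorSet())  # vectorset configuration, left empty here

    del_request = writer_pb2.DelVectorSetRequest(kb=kb, vectorset="my-vectorset")

    # OpStatusWriter is the generic acknowledgement message regenerated above
    status = writer_pb2.OpStatusWriter(status=writer_pb2.OpStatusWriter.OK)
    assert status.status == writer_pb2.OpStatusWriter.Status.OK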
global___OpStatusWriter = OpStatusWriter -@typing.final +@typing_extensions.final class Notification(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1396,7 +1381,7 @@ class Notification(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Notification._Action.ValueType], builtins.type): + class _ActionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Notification._Action.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor COMMIT: Notification._Action.ValueType # 0 ABORT: Notification._Action.ValueType # 1 @@ -1411,7 +1396,7 @@ class Notification(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _WriteTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Notification._WriteType.ValueType], builtins.type): + class _WriteTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Notification._WriteType.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNSET: Notification._WriteType.ValueType # 0 CREATED: Notification._WriteType.ValueType # 1 @@ -1441,10 +1426,10 @@ class Notification(google.protobuf.message.Message): seqid: builtins.int action: global___Notification.Action.ValueType write_type: global___Notification.WriteType.ValueType - source: global___NotificationSource.ValueType - processing_errors: builtins.bool @property def message(self) -> global___BrokerMessage: ... + source: global___NotificationSource.ValueType + processing_errors: builtins.bool def __init__( self, *, @@ -1459,12 +1444,12 @@ class Notification(google.protobuf.message.Message): source: global___NotificationSource.ValueType = ..., processing_errors: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["message", b"message"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["action", b"action", "kbid", b"kbid", "message", b"message", "multi", b"multi", "partition", b"partition", "processing_errors", b"processing_errors", "seqid", b"seqid", "source", b"source", "uuid", b"uuid", "write_type", b"write_type"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["action", b"action", "kbid", b"kbid", "message", b"message", "multi", b"multi", "partition", b"partition", "processing_errors", b"processing_errors", "seqid", b"seqid", "source", b"source", "uuid", b"uuid", "write_type", b"write_type"]) -> None: ... 
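A small sketch of building the Notification message typed above, using only enum values visible in this stub; the ids and sequence number are placeholders:

    from nucliadb_protos import writer_pb2

    notification = writer_pb2.Notification(
        kbid="kb-uuid",
        uuid="resource-uuid",
        seqid=42,
        action=writer_pb2.Notification.COMMIT,
        write_type=writer_pb2.Notification.CREATED,
        processing_errors=False,
    )
    # `message` carries the full BrokerMessage and `source` a NotificationSource value; both are omitted here
    assert notification.write_type == writer_pb2.Notification.WriteType.CREATED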
global___Notification = Notification -@typing.final +@typing_extensions.final class Member(google.protobuf.message.Message): """The member information.""" @@ -1474,7 +1459,7 @@ class Member(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Member._Type.ValueType], builtins.type): + class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Member._Type.ValueType], builtins.type): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor IO: Member._Type.ValueType # 0 SEARCH: Member._Type.ValueType # 1 @@ -1527,11 +1512,11 @@ class Member(google.protobuf.message.Message): shard_count: builtins.int = ..., primary_id: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dummy", b"dummy", "id", b"id", "is_self", b"is_self", "listen_address", b"listen_address", "load_score", b"load_score", "primary_id", b"primary_id", "shard_count", b"shard_count", "type", b"type"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["dummy", b"dummy", "id", b"id", "is_self", b"is_self", "listen_address", b"listen_address", "load_score", b"load_score", "primary_id", b"primary_id", "shard_count", b"shard_count", "type", b"type"]) -> None: ... global___Member = Member -@typing.final +@typing_extensions.final class ListMembersRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1541,7 +1526,7 @@ class ListMembersRequest(google.protobuf.message.Message): global___ListMembersRequest = ListMembersRequest -@typing.final +@typing_extensions.final class ListMembersResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1553,31 +1538,31 @@ class ListMembersResponse(google.protobuf.message.Message): *, members: collections.abc.Iterable[global___Member] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["members", b"members"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["members", b"members"]) -> None: ... global___ListMembersResponse = ListMembersResponse -@typing.final +@typing_extensions.final class ShardReplica(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor SHARD_FIELD_NUMBER: builtins.int NODE_FIELD_NUMBER: builtins.int - node: builtins.str @property def shard(self) -> nucliadb_protos.noderesources_pb2.ShardCreated: ... + node: builtins.str def __init__( self, *, shard: nucliadb_protos.noderesources_pb2.ShardCreated | None = ..., node: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["shard", b"shard"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["node", b"node", "shard", b"shard"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["shard", b"shard"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["node", b"node", "shard", b"shard"]) -> None: ... 
global___ShardReplica = ShardReplica -@typing.final +@typing_extensions.final class ShardObject(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1586,11 +1571,11 @@ class ShardObject(google.protobuf.message.Message): TIMESTAMP_FIELD_NUMBER: builtins.int READ_ONLY_FIELD_NUMBER: builtins.int shard: builtins.str - read_only: builtins.bool @property def replicas(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardReplica]: ... @property def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + read_only: builtins.bool def __init__( self, *, @@ -1599,16 +1584,16 @@ class ShardObject(google.protobuf.message.Message): timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., read_only: builtins.bool = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["timestamp", b"timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["read_only", b"read_only", "replicas", b"replicas", "shard", b"shard", "timestamp", b"timestamp"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["timestamp", b"timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["read_only", b"read_only", "replicas", b"replicas", "shard", b"shard", "timestamp", b"timestamp"]) -> None: ... global___ShardObject = ShardObject -@typing.final +@typing_extensions.final class Shards(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - @typing.final + @typing_extensions.final class ExtraEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1622,7 +1607,7 @@ class Shards(google.protobuf.message.Message): key: builtins.str = ..., value: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... SHARDS_FIELD_NUMBER: builtins.int KBID_FIELD_NUMBER: builtins.int @@ -1631,6 +1616,8 @@ class Shards(google.protobuf.message.Message): MODEL_FIELD_NUMBER: builtins.int RELEASE_CHANNEL_FIELD_NUMBER: builtins.int EXTRA_FIELD_NUMBER: builtins.int + @property + def shards(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardObject]: ... kbid: builtins.str actual: builtins.int """DEPRECATED a KB know can have multiple alive shards and is tracked in @@ -1638,11 +1625,9 @@ class Shards(google.protobuf.message.Message): """ similarity: nucliadb_protos.utils_pb2.VectorSimilarity.ValueType """DEPRECATED in favor of `model` to include more data""" - release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType - @property - def shards(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ShardObject]: ... @property def model(self) -> nucliadb_protos.knowledgebox_pb2.SemanticModelMetadata: ... + release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType @property def extra(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... def __init__( @@ -1656,12 +1641,12 @@ class Shards(google.protobuf.message.Message): release_channel: nucliadb_protos.utils_pb2.ReleaseChannel.ValueType = ..., extra: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["model", b"model"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing.Literal["actual", b"actual", "extra", b"extra", "kbid", b"kbid", "model", b"model", "release_channel", b"release_channel", "shards", b"shards", "similarity", b"similarity"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["model", b"model"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["actual", b"actual", "extra", b"extra", "kbid", b"kbid", "model", b"model", "release_channel", b"release_channel", "shards", b"shards", "similarity", b"similarity"]) -> None: ... global___Shards = Shards -@typing.final +@typing_extensions.final class IndexResource(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1678,11 +1663,11 @@ class IndexResource(google.protobuf.message.Message): rid: builtins.str = ..., reindex_vectors: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "reindex_vectors", b"reindex_vectors", "rid", b"rid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "reindex_vectors", b"reindex_vectors", "rid", b"rid"]) -> None: ... global___IndexResource = IndexResource -@typing.final +@typing_extensions.final class IndexStatus(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1692,7 +1677,7 @@ class IndexStatus(google.protobuf.message.Message): global___IndexStatus = IndexStatus -@typing.final +@typing_extensions.final class SetVectorsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1700,10 +1685,10 @@ class SetVectorsRequest(google.protobuf.message.Message): KBID_FIELD_NUMBER: builtins.int RID_FIELD_NUMBER: builtins.int FIELD_FIELD_NUMBER: builtins.int - kbid: builtins.str - rid: builtins.str @property def vectors(self) -> nucliadb_protos.utils_pb2.VectorObject: ... + kbid: builtins.str + rid: builtins.str @property def field(self) -> nucliadb_protos.resources_pb2.FieldID: ... def __init__( @@ -1714,12 +1699,12 @@ class SetVectorsRequest(google.protobuf.message.Message): rid: builtins.str = ..., field: nucliadb_protos.resources_pb2.FieldID | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["field", b"field", "vectors", b"vectors"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["field", b"field", "kbid", b"kbid", "rid", b"rid", "vectors", b"vectors"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["field", b"field", "vectors", b"vectors"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["field", b"field", "kbid", b"kbid", "rid", b"rid", "vectors", b"vectors"]) -> None: ... global___SetVectorsRequest = SetVectorsRequest -@typing.final +@typing_extensions.final class SetVectorsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1730,11 +1715,11 @@ class SetVectorsResponse(google.protobuf.message.Message): *, found: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["found", b"found"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["found", b"found"]) -> None: ... 
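For reference, a hedged sketch of how the SetVectors messages above are filled; the identifiers are placeholders and the FieldID and VectorObject payloads are left empty:

    from nucliadb_protos import resources_pb2, utils_pb2, writer_pb2

    request = writer_pb2.SetVectorsRequest(kbid="kb-uuid", rid="resource-uuid")
    request.field.CopyFrom(resources_pb2.FieldID())     # which field the vectors belong to, payload omitted
    request.vectors.CopyFrom(utils_pb2.VectorObject())  # the vectors themselves, payload omitted

    response = writer_pb2.SetVectorsResponse(found=True)  # `found` presumably reports whether the resource was found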
global___SetVectorsResponse = SetVectorsResponse -@typing.final +@typing_extensions.final class FileRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1748,11 +1733,11 @@ class FileRequest(google.protobuf.message.Message): bucket: builtins.str = ..., key: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["bucket", b"bucket", "key", b"key"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["bucket", b"bucket", "key", b"key"]) -> None: ... global___FileRequest = FileRequest -@typing.final +@typing_extensions.final class BinaryData(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1763,11 +1748,11 @@ class BinaryData(google.protobuf.message.Message): *, data: builtins.bytes = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["data", b"data"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["data", b"data"]) -> None: ... global___BinaryData = BinaryData -@typing.final +@typing_extensions.final class BinaryMetadata(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1790,11 +1775,11 @@ class BinaryMetadata(google.protobuf.message.Message): filename: builtins.str = ..., content_type: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["content_type", b"content_type", "filename", b"filename", "kbid", b"kbid", "key", b"key", "size", b"size"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "filename", b"filename", "kbid", b"kbid", "key", b"key", "size", b"size"]) -> None: ... global___BinaryMetadata = BinaryMetadata -@typing.final +@typing_extensions.final class UploadBinaryData(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1802,9 +1787,9 @@ class UploadBinaryData(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int PAYLOAD_FIELD_NUMBER: builtins.int count: builtins.int - payload: builtins.bytes @property def metadata(self) -> global___BinaryMetadata: ... + payload: builtins.bytes def __init__( self, *, @@ -1812,13 +1797,13 @@ class UploadBinaryData(google.protobuf.message.Message): metadata: global___BinaryMetadata | None = ..., payload: builtins.bytes = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["data", b"data", "metadata", b"metadata", "payload", b"payload"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["count", b"count", "data", b"data", "metadata", b"metadata", "payload", b"payload"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["data", b"data"]) -> typing.Literal["metadata", "payload"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["data", b"data", "metadata", b"metadata", "payload", b"payload"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["count", b"count", "data", b"data", "metadata", b"metadata", "payload", b"payload"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["data", b"data"]) -> typing_extensions.Literal["metadata", "payload"] | None: ... 
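UploadBinaryData keeps its `data` oneof (`metadata` or `payload`), so a client stream for UploadFile can be sketched as a generator. The calling convention assumed here, metadata first and payload chunks afterwards, is an assumption of this sketch rather than something the stubs enforce; the chunk size and file name are made up:

    from nucliadb_protos import writer_pb2

    def upload_stream(kbid: str, key: str, data: bytes, chunk_size: int = 512 * 1024):
        meta = writer_pb2.BinaryMetadata(
            kbid=kbid,
            key=key,
            size=len(data),
            filename="upload.bin",
            content_type="application/octet-stream",
        )
        # first message of the stream carries the metadata arm of the oneof
        yield writer_pb2.UploadBinaryData(count=0, metadata=meta)
        # subsequent messages carry raw payload chunks
        for i, offset in enumerate(range(0, len(data), chunk_size), start=1):
            yield writer_pb2.UploadBinaryData(count=i, payload=data[offset:offset + chunk_size])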
global___UploadBinaryData = UploadBinaryData -@typing.final +@typing_extensions.final class FileUploaded(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1828,7 +1813,7 @@ class FileUploaded(google.protobuf.message.Message): global___FileUploaded = FileUploaded -@typing.final +@typing_extensions.final class SynonymsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1839,11 +1824,11 @@ class SynonymsRequest(google.protobuf.message.Message): *, kbid: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid"]) -> None: ... global___SynonymsRequest = SynonymsRequest -@typing.final +@typing_extensions.final class SetSynonymsRequest(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1859,12 +1844,12 @@ class SetSynonymsRequest(google.protobuf.message.Message): kbid: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID | None = ..., synonyms: nucliadb_protos.knowledgebox_pb2.Synonyms | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["kbid", b"kbid", "synonyms", b"synonyms"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["kbid", b"kbid", "synonyms", b"synonyms"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "synonyms", b"synonyms"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["kbid", b"kbid", "synonyms", b"synonyms"]) -> None: ... global___SetSynonymsRequest = SetSynonymsRequest -@typing.final +@typing_extensions.final class GetSynonymsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1880,7 +1865,7 @@ class GetSynonymsResponse(google.protobuf.message.Message): status: global___OpStatusWriter | None = ..., synonyms: nucliadb_protos.knowledgebox_pb2.Synonyms | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["status", b"status", "synonyms", b"synonyms"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["status", b"status", "synonyms", b"synonyms"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["status", b"status", "synonyms", b"synonyms"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["status", b"status", "synonyms", b"synonyms"]) -> None: ... global___GetSynonymsResponse = GetSynonymsResponse diff --git a/nucliadb_protos/python/nucliadb_protos/writer_pb2_grpc.pyi b/nucliadb_protos/python/nucliadb_protos/writer_pb2_grpc.pyi index 3051da4598..7d883b91a9 100644 --- a/nucliadb_protos/python/nucliadb_protos/writer_pb2_grpc.pyi +++ b/nucliadb_protos/python/nucliadb_protos/writer_pb2_grpc.pyi @@ -2,14 +2,11 @@ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file """ - import abc import collections.abc import grpc -import grpc.aio import nucliadb_protos.knowledgebox_pb2 import nucliadb_protos.writer_pb2 -import typing from nucliadb_protos.knowledgebox_pb2 import ( CONFLICT as CONFLICT, DeleteKnowledgeBoxResponse as DeleteKnowledgeBoxResponse, @@ -62,6 +59,7 @@ from nucliadb_protos.noderesources_pb2 import ( VectorSentence as VectorSentence, VectorSetID as VectorSetID, VectorSetList as VectorSetList, + VectorsetSentences as VectorsetSentences, ) from nucliadb_protos.resources_pb2 import ( AllFieldIDs as AllFieldIDs, @@ -138,466 +136,275 @@ from nucliadb_protos.resources_pb2 import ( VisualSelection as VisualSelection, ) -_T = typing.TypeVar("_T") - -class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... - -class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] - ... - class WriterStub: - def __init__(self, channel: typing.Union[grpc.Channel, grpc.aio.Channel]) -> None: ... + def __init__(self, channel: grpc.Channel) -> None: ... NewKnowledgeBox: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxNew, nucliadb_protos.knowledgebox_pb2.NewKnowledgeBoxResponse, ] - DeleteKnowledgeBox: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, nucliadb_protos.knowledgebox_pb2.DeleteKnowledgeBoxResponse, ] - UpdateKnowledgeBox: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxUpdate, nucliadb_protos.knowledgebox_pb2.UpdateKnowledgeBoxResponse, ] - GCKnowledgeBox: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, nucliadb_protos.knowledgebox_pb2.GCKnowledgeBoxResponse, ] - SetVectors: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.SetVectorsRequest, nucliadb_protos.writer_pb2.SetVectorsResponse, ] - ProcessMessage: grpc.StreamUnaryMultiCallable[ nucliadb_protos.writer_pb2.BrokerMessage, nucliadb_protos.writer_pb2.OpStatusWriter, ] - GetLabels: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetLabelsRequest, nucliadb_protos.writer_pb2.GetLabelsResponse, ] """Labels""" - GetLabelSet: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetLabelSetRequest, nucliadb_protos.writer_pb2.GetLabelSetResponse, ] - SetLabels: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.SetLabelsRequest, nucliadb_protos.writer_pb2.OpStatusWriter, ] - DelLabels: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.DelLabelsRequest, nucliadb_protos.writer_pb2.OpStatusWriter, ] - NewEntitiesGroup: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.NewEntitiesGroupRequest, nucliadb_protos.writer_pb2.NewEntitiesGroupResponse, ] """Entities""" - GetEntities: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetEntitiesRequest, nucliadb_protos.writer_pb2.GetEntitiesResponse, ] - GetEntitiesGroup: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.GetEntitiesGroupRequest, nucliadb_protos.writer_pb2.GetEntitiesGroupResponse, ] - ListEntitiesGroups: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.ListEntitiesGroupsRequest, nucliadb_protos.writer_pb2.ListEntitiesGroupsResponse, ] - SetEntities: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.SetEntitiesRequest, nucliadb_protos.writer_pb2.OpStatusWriter, ] - UpdateEntitiesGroup: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.UpdateEntitiesGroupRequest, nucliadb_protos.writer_pb2.UpdateEntitiesGroupResponse, ] - DelEntities: 
grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.DelEntitiesRequest, nucliadb_protos.writer_pb2.OpStatusWriter, ] - GetSynonyms: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, nucliadb_protos.writer_pb2.GetSynonymsResponse, ] """Synonyms""" - SetSynonyms: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.SetSynonymsRequest, nucliadb_protos.writer_pb2.OpStatusWriter, ] - DelSynonyms: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, nucliadb_protos.writer_pb2.OpStatusWriter, ] - Status: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.WriterStatusRequest, nucliadb_protos.writer_pb2.WriterStatusResponse, ] - ListMembers: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.ListMembersRequest, nucliadb_protos.writer_pb2.ListMembersResponse, ] - Index: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.IndexResource, nucliadb_protos.writer_pb2.IndexStatus, ] - ReIndex: grpc.UnaryUnaryMultiCallable[ nucliadb_protos.writer_pb2.IndexResource, nucliadb_protos.writer_pb2.IndexStatus, ] - DownloadFile: grpc.UnaryStreamMultiCallable[ nucliadb_protos.writer_pb2.FileRequest, nucliadb_protos.writer_pb2.BinaryData, ] - UploadFile: grpc.StreamUnaryMultiCallable[ nucliadb_protos.writer_pb2.UploadBinaryData, nucliadb_protos.writer_pb2.FileUploaded, ] -class WriterAsyncStub: - NewKnowledgeBox: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxNew, - nucliadb_protos.knowledgebox_pb2.NewKnowledgeBoxResponse, - ] - - DeleteKnowledgeBox: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - nucliadb_protos.knowledgebox_pb2.DeleteKnowledgeBoxResponse, - ] - - UpdateKnowledgeBox: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxUpdate, - nucliadb_protos.knowledgebox_pb2.UpdateKnowledgeBoxResponse, - ] - - GCKnowledgeBox: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - nucliadb_protos.knowledgebox_pb2.GCKnowledgeBoxResponse, - ] - - SetVectors: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.SetVectorsRequest, - nucliadb_protos.writer_pb2.SetVectorsResponse, - ] - - ProcessMessage: grpc.aio.StreamUnaryMultiCallable[ - nucliadb_protos.writer_pb2.BrokerMessage, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - GetLabels: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetLabelsRequest, - nucliadb_protos.writer_pb2.GetLabelsResponse, - ] - """Labels""" - - GetLabelSet: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetLabelSetRequest, - nucliadb_protos.writer_pb2.GetLabelSetResponse, - ] - - SetLabels: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.SetLabelsRequest, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - DelLabels: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.DelLabelsRequest, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - NewEntitiesGroup: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.NewEntitiesGroupRequest, - nucliadb_protos.writer_pb2.NewEntitiesGroupResponse, - ] - """Entities""" - - GetEntities: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetEntitiesRequest, - nucliadb_protos.writer_pb2.GetEntitiesResponse, - ] - - GetEntitiesGroup: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.GetEntitiesGroupRequest, - nucliadb_protos.writer_pb2.GetEntitiesGroupResponse, - ] - - ListEntitiesGroups: grpc.aio.UnaryUnaryMultiCallable[ - 
nucliadb_protos.writer_pb2.ListEntitiesGroupsRequest, - nucliadb_protos.writer_pb2.ListEntitiesGroupsResponse, - ] - - SetEntities: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.SetEntitiesRequest, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - UpdateEntitiesGroup: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.UpdateEntitiesGroupRequest, - nucliadb_protos.writer_pb2.UpdateEntitiesGroupResponse, - ] - - DelEntities: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.DelEntitiesRequest, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - GetSynonyms: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - nucliadb_protos.writer_pb2.GetSynonymsResponse, - ] - """Synonyms""" - - SetSynonyms: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.SetSynonymsRequest, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - DelSynonyms: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - nucliadb_protos.writer_pb2.OpStatusWriter, - ] - - Status: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.WriterStatusRequest, - nucliadb_protos.writer_pb2.WriterStatusResponse, - ] - - ListMembers: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.ListMembersRequest, - nucliadb_protos.writer_pb2.ListMembersResponse, - ] - - Index: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.IndexResource, - nucliadb_protos.writer_pb2.IndexStatus, - ] - - ReIndex: grpc.aio.UnaryUnaryMultiCallable[ - nucliadb_protos.writer_pb2.IndexResource, - nucliadb_protos.writer_pb2.IndexStatus, - ] - - DownloadFile: grpc.aio.UnaryStreamMultiCallable[ - nucliadb_protos.writer_pb2.FileRequest, - nucliadb_protos.writer_pb2.BinaryData, - ] - - UploadFile: grpc.aio.StreamUnaryMultiCallable[ - nucliadb_protos.writer_pb2.UploadBinaryData, - nucliadb_protos.writer_pb2.FileUploaded, - ] - class WriterServicer(metaclass=abc.ABCMeta): @abc.abstractmethod def NewKnowledgeBox( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxNew, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.knowledgebox_pb2.NewKnowledgeBoxResponse, collections.abc.Awaitable[nucliadb_protos.knowledgebox_pb2.NewKnowledgeBoxResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.knowledgebox_pb2.NewKnowledgeBoxResponse: ... @abc.abstractmethod def DeleteKnowledgeBox( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.knowledgebox_pb2.DeleteKnowledgeBoxResponse, collections.abc.Awaitable[nucliadb_protos.knowledgebox_pb2.DeleteKnowledgeBoxResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.knowledgebox_pb2.DeleteKnowledgeBoxResponse: ... @abc.abstractmethod def UpdateKnowledgeBox( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxUpdate, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.knowledgebox_pb2.UpdateKnowledgeBoxResponse, collections.abc.Awaitable[nucliadb_protos.knowledgebox_pb2.UpdateKnowledgeBoxResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.knowledgebox_pb2.UpdateKnowledgeBoxResponse: ... @abc.abstractmethod def GCKnowledgeBox( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.knowledgebox_pb2.GCKnowledgeBoxResponse, collections.abc.Awaitable[nucliadb_protos.knowledgebox_pb2.GCKnowledgeBoxResponse]]: ... 
- + context: grpc.ServicerContext, + ) -> nucliadb_protos.knowledgebox_pb2.GCKnowledgeBoxResponse: ... @abc.abstractmethod def SetVectors( self, request: nucliadb_protos.writer_pb2.SetVectorsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.SetVectorsResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.SetVectorsResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.SetVectorsResponse: ... @abc.abstractmethod def ProcessMessage( self, - request_iterator: _MaybeAsyncIterator[nucliadb_protos.writer_pb2.BrokerMessage], - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + request_iterator: collections.abc.Iterator[nucliadb_protos.writer_pb2.BrokerMessage], + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def GetLabels( self, request: nucliadb_protos.writer_pb2.GetLabelsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetLabelsResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetLabelsResponse]]: + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetLabelsResponse: """Labels""" - @abc.abstractmethod def GetLabelSet( self, request: nucliadb_protos.writer_pb2.GetLabelSetRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetLabelSetResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetLabelSetResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetLabelSetResponse: ... @abc.abstractmethod def SetLabels( self, request: nucliadb_protos.writer_pb2.SetLabelsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def DelLabels( self, request: nucliadb_protos.writer_pb2.DelLabelsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def NewEntitiesGroup( self, request: nucliadb_protos.writer_pb2.NewEntitiesGroupRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.NewEntitiesGroupResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.NewEntitiesGroupResponse]]: + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.NewEntitiesGroupResponse: """Entities""" - @abc.abstractmethod def GetEntities( self, request: nucliadb_protos.writer_pb2.GetEntitiesRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetEntitiesResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetEntitiesResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetEntitiesResponse: ... @abc.abstractmethod def GetEntitiesGroup( self, request: nucliadb_protos.writer_pb2.GetEntitiesGroupRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetEntitiesGroupResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetEntitiesGroupResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetEntitiesGroupResponse: ... 
@abc.abstractmethod def ListEntitiesGroups( self, request: nucliadb_protos.writer_pb2.ListEntitiesGroupsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.ListEntitiesGroupsResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.ListEntitiesGroupsResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.ListEntitiesGroupsResponse: ... @abc.abstractmethod def SetEntities( self, request: nucliadb_protos.writer_pb2.SetEntitiesRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def UpdateEntitiesGroup( self, request: nucliadb_protos.writer_pb2.UpdateEntitiesGroupRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.UpdateEntitiesGroupResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.UpdateEntitiesGroupResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.UpdateEntitiesGroupResponse: ... @abc.abstractmethod def DelEntities( self, request: nucliadb_protos.writer_pb2.DelEntitiesRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def GetSynonyms( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.GetSynonymsResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.GetSynonymsResponse]]: + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.GetSynonymsResponse: """Synonyms""" - @abc.abstractmethod def SetSynonyms( self, request: nucliadb_protos.writer_pb2.SetSynonymsRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def DelSynonyms( self, request: nucliadb_protos.knowledgebox_pb2.KnowledgeBoxID, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.OpStatusWriter, collections.abc.Awaitable[nucliadb_protos.writer_pb2.OpStatusWriter]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.OpStatusWriter: ... @abc.abstractmethod def Status( self, request: nucliadb_protos.writer_pb2.WriterStatusRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.WriterStatusResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.WriterStatusResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.WriterStatusResponse: ... @abc.abstractmethod def ListMembers( self, request: nucliadb_protos.writer_pb2.ListMembersRequest, - context: _ServicerContext, - ) -> typing.Union[nucliadb_protos.writer_pb2.ListMembersResponse, collections.abc.Awaitable[nucliadb_protos.writer_pb2.ListMembersResponse]]: ... - + context: grpc.ServicerContext, + ) -> nucliadb_protos.writer_pb2.ListMembersResponse: ... 
     @abc.abstractmethod
     def Index(
         self,
         request: nucliadb_protos.writer_pb2.IndexResource,
-        context: _ServicerContext,
-    ) -> typing.Union[nucliadb_protos.writer_pb2.IndexStatus, collections.abc.Awaitable[nucliadb_protos.writer_pb2.IndexStatus]]: ...
-
+        context: grpc.ServicerContext,
+    ) -> nucliadb_protos.writer_pb2.IndexStatus: ...
     @abc.abstractmethod
     def ReIndex(
         self,
         request: nucliadb_protos.writer_pb2.IndexResource,
-        context: _ServicerContext,
-    ) -> typing.Union[nucliadb_protos.writer_pb2.IndexStatus, collections.abc.Awaitable[nucliadb_protos.writer_pb2.IndexStatus]]: ...
-
+        context: grpc.ServicerContext,
+    ) -> nucliadb_protos.writer_pb2.IndexStatus: ...
     @abc.abstractmethod
     def DownloadFile(
         self,
         request: nucliadb_protos.writer_pb2.FileRequest,
-        context: _ServicerContext,
-    ) -> typing.Union[collections.abc.Iterator[nucliadb_protos.writer_pb2.BinaryData], collections.abc.AsyncIterator[nucliadb_protos.writer_pb2.BinaryData]]: ...
-
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[nucliadb_protos.writer_pb2.BinaryData]: ...
     @abc.abstractmethod
     def UploadFile(
         self,
-        request_iterator: _MaybeAsyncIterator[nucliadb_protos.writer_pb2.UploadBinaryData],
-        context: _ServicerContext,
-    ) -> typing.Union[nucliadb_protos.writer_pb2.FileUploaded, collections.abc.Awaitable[nucliadb_protos.writer_pb2.FileUploaded]]: ...
+        request_iterator: collections.abc.Iterator[nucliadb_protos.writer_pb2.UploadBinaryData],
+        context: grpc.ServicerContext,
+    ) -> nucliadb_protos.writer_pb2.FileUploaded: ...
 
-def add_WriterServicer_to_server(servicer: WriterServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ...
+def add_WriterServicer_to_server(servicer: WriterServicer, server: grpc.Server) -> None: ...
diff --git a/nucliadb_protos/rust/src/noderesources.rs b/nucliadb_protos/rust/src/noderesources.rs
index 6acf235e22..3bae19c24c 100644
--- a/nucliadb_protos/rust/src/noderesources.rs
+++ b/nucliadb_protos/rust/src/noderesources.rs
@@ -278,6 +278,16 @@ pub struct VectorSentence {
 }
 #[allow(clippy::derive_partial_eq_without_eq)]
 #[derive(Clone, PartialEq, ::prost::Message)]
+pub struct VectorsetSentences {
+    /// key is full id for vectors
+    #[prost(map = "string, message", tag = "1")]
+    pub sentences: ::std::collections::HashMap<
+        ::prost::alloc::string::String,
+        VectorSentence,
+    >,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
 pub struct ParagraphMetadata {
     #[prost(message, optional, tag = "1")]
     pub position: ::core::option::Option,
@@ -304,6 +314,12 @@ pub struct IndexParagraph {
         ::prost::alloc::string::String,
         VectorSentence,
     >,
+    /// key is vectorset id
+    #[prost(map = "string, message", tag = "10")]
+    pub vectorsets_sentences: ::std::collections::HashMap<
+        ::prost::alloc::string::String,
+        VectorsetSentences,
+    >,
     #[prost(string, tag = "5")]
     pub field: ::prost::alloc::string::String,
     /// split were it belongs
@@ -330,7 +346,7 @@ pub struct VectorSetList {
     #[prost(message, optional, tag = "1")]
     pub shard: ::core::option::Option,
     #[prost(string, repeated, tag = "2")]
-    pub vectorset: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
+    pub vectorsets: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
 }
 #[allow(clippy::derive_partial_eq_without_eq)]
 #[derive(Clone, PartialEq, ::prost::Message)]
diff --git a/nucliadb_protos/rust/src/nodewriter.rs b/nucliadb_protos/rust/src/nodewriter.rs
index c1aeabbdcf..2b66541d87 100644
--- a/nucliadb_protos/rust/src/nodewriter.rs
+++ b/nucliadb_protos/rust/src/nodewriter.rs
@@ -151,6 +151,9 @@ pub struct NewVectorSetRequest {
     pub id: ::core::option::Option,
     #[prost(enumeration = "super::utils::VectorSimilarity", tag = "2")]
     pub similarity: i32,
+    /// indicates whether the shard should normalize vectors on indexing or not
+    #[prost(bool, tag = "3")]
+    pub normalize_vectors: bool,
 }
 #[allow(clippy::derive_partial_eq_without_eq)]
 #[derive(Clone, PartialEq, ::prost::Message)]
diff --git a/nucliadb_vectors/src/service/reader.rs b/nucliadb_vectors/src/service/reader.rs
index 02a3f0d53b..ce119dccf4 100644
--- a/nucliadb_vectors/src/service/reader.rs
+++ b/nucliadb_vectors/src/service/reader.rs
@@ -182,7 +182,7 @@ mod tests {
     use nucliadb_core::protos::resource::ResourceStatus;
     use nucliadb_core::protos::{
-        IndexParagraph, IndexParagraphs, Resource, ResourceId, VectorSentence, VectorSimilarity,
+        IndexParagraph, IndexParagraphs, Resource, ResourceId, VectorSentence, VectorSimilarity, VectorsetSentences,
     };
     use nucliadb_core::Channel;
     use tempfile::TempDir;
@@ -223,7 +223,13 @@ mod tests {
         let paragraph = IndexParagraph {
             start: 0,
             end: 0,
-            sentences,
+            sentences: sentences.clone(),
+            vectorsets_sentences: HashMap::from([(
+                "__default__".to_string(),
+                VectorsetSentences {
+                    sentences,
+                },
+            )]),
             field: "".to_string(),
             labels: vec!["1".to_string()],
             index: 3,
@@ -307,7 +313,13 @@ mod tests {
         let paragraph = IndexParagraph {
             start: 0,
             end: 0,
-            sentences,
+            sentences: sentences.clone(),
+            vectorsets_sentences: HashMap::from([(
+                "__default__".to_string(),
+                VectorsetSentences {
+                    sentences,
+                },
+            )]),
             field: "".to_string(),
             labels: vec!["1".to_string()],
             index: 3,
@@ -426,7 +438,13 @@ mod tests {
         let paragraph = IndexParagraph {
             start: 0,
             end: 0,
-            sentences,
+            sentences: sentences.clone(),
+            vectorsets_sentences: HashMap::from([(
+                "__default__".to_string(),
+                VectorsetSentences {
+                    sentences,
+                },
+            )]),
             field: "".to_string(),
             labels: vec!["1".to_string()],
             index: 3,
diff --git a/nucliadb_vectors/src/service/writer.rs b/nucliadb_vectors/src/service/writer.rs
index 927db16972..2d17825111 100644
--- a/nucliadb_vectors/src/service/writer.rs
+++ b/nucliadb_vectors/src/service/writer.rs
@@ -251,7 +251,7 @@ impl VectorWriterService {
 mod tests {
     use nucliadb_core::protos::resource::ResourceStatus;
     use nucliadb_core::protos::{
-        IndexParagraph, IndexParagraphs, Resource, ResourceId, VectorSentence, VectorSimilarity,
+        IndexParagraph, IndexParagraphs, Resource, ResourceId, VectorSentence, VectorSimilarity, VectorsetSentences,
     };
     use nucliadb_core::Channel;
     use std::collections::HashMap;
@@ -290,7 +290,13 @@ mod tests {
         let paragraph = IndexParagraph {
             start: 0,
             end: 0,
-            sentences,
+            sentences: sentences.clone(),
+            vectorsets_sentences: HashMap::from([(
+                "__default__".to_string(),
+                VectorsetSentences {
+                    sentences,
+                },
+            )]),
             field: "".to_string(),
             labels: vec!["1".to_string(), "2".to_string(), "3".to_string()],
             index: 3,
@@ -353,7 +359,13 @@ mod tests {
         let paragraph = IndexParagraph {
             start: 0,
             end: 0,
-            sentences,
+            sentences: sentences.clone(),
+            vectorsets_sentences: HashMap::from([(
+                "__default__".to_string(),
+                VectorsetSentences {
+                    sentences,
+                },
+            )]),
             field: "".to_string(),
             labels: vec!["1".to_string(), "2".to_string(), "3".to_string()],
             index: 3,
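The test updates above show the indexing contract introduced by this patch: resources keep filling the legacy `sentences` map and also group the same sentences under `vectorsets_sentences`, keyed by vectorset id ("__default__" in these tests). A minimal Python mirror of that shape, assuming the regenerated `noderesources_pb2` bindings; the sentence key and the empty VectorSentence payload are illustrative only:

    from nucliadb_protos import noderesources_pb2

    paragraph = noderesources_pb2.IndexParagraph(start=0, end=0, index=3, field="", labels=["1"])
    sentence = noderesources_pb2.VectorSentence()  # vector payload omitted in this sketch
    key = "rid/field/paragraph/0-0"                # illustrative sentence id

    # legacy flat map, still populated for backwards compatibility
    paragraph.sentences[key].CopyFrom(sentence)

    # new per-vectorset map, keyed by vectorset id as in the tests above
    paragraph.vectorsets_sentences["__default__"].sentences[key].CopyFrom(sentence)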