Skip to content

Commit

Permalink
refactor: rename `Duplicated*`/`Conflicted*` error variants to `Duplicate*`/`Conflict*`
Browse files Browse the repository at this point in the history
  • Loading branch information
apskhem committed Mar 12, 2024
1 parent 20e5410 commit a944938
Show file tree
Hide file tree
Showing 4 changed files with 51 additions and 51 deletions.
56 changes: 28 additions & 28 deletions src/analyzer/err.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,36 +23,36 @@ pub enum Err {
ArrayPrimaryKey,
#[display(fmt = "Invalid indexes definition setting: can specify either 'pk', 'unique', or 'type' within a setting")]
InvalidIndexesSetting,
#[display(fmt = "Duplicated attribute key")]
DuplicatedAttributeKey,
#[display(fmt = "Duplicated property key")]
DuplicatedPropertyKey,
#[display(fmt = "Duplicated project setting")]
DuplicatedProjectSetting,
#[display(fmt = "Duplicated primary key")]
DuplicatedPrimaryKey,
#[display(fmt = "Duplicated unique key")]
DuplicatedUniqueKey,
#[display(fmt = "Duplicated index key")]
DuplicatedIndexKey,
#[display(fmt = "Duplicated table name")]
DuplicatedTableName,
#[display(fmt = "Duplicated column name")]
DuplicatedColumnName,
#[display(fmt = "Duplicate attribute key")]
DuplicateAttributeKey,
#[display(fmt = "Duplicate property key")]
DuplicatePropertyKey,
#[display(fmt = "Duplicate project setting")]
DuplicateProjectSetting,
#[display(fmt = "Duplicate primary key")]
DuplicatePrimaryKey,
#[display(fmt = "Duplicate unique key")]
DuplicateUniqueKey,
#[display(fmt = "Duplicate index key")]
DuplicateIndexKey,
#[display(fmt = "Duplicate table name")]
DuplicateTableName,
#[display(fmt = "Duplicate column name")]
DuplicateColumnName,
#[display(fmt = "Conflict relation")]
ConflictRelation,
#[display(fmt = "Duplicated enum name")]
DuplicatedEnumName,
#[display(fmt = "Duplicated enum value")]
DuplicatedEnumValue,
#[display(fmt = "Duplicated table group name")]
DuplicatedTableGroupName,
#[display(fmt = "Duplicated table group item")]
DuplicatedTableGroupItem,
#[display(fmt = "Duplicated alias")]
DuplicatedAlias,
#[display(fmt = "Conflicted nullable setting: 'null' and 'not null' must not appear within the same setting")]
ConflictedNullableSetting,
#[display(fmt = "Duplicate enum name")]
DuplicateEnumName,
#[display(fmt = "Duplicate enum value")]
DuplicateEnumValue,
#[display(fmt = "Duplicate table group name")]
DuplicateTableGroupName,
#[display(fmt = "Duplicate table group item")]
DuplicateTableGroupItem,
#[display(fmt = "Duplicate alias")]
DuplicateAlias,
#[display(fmt = "Conflict nullable setting: 'null' and 'not null' must not appear within the same setting")]
ConflictNullableSetting,
#[display(fmt = "Project setting not found")]
ProjectSettingNotFound,
#[display(fmt = "Empty indexes block")]
Expand Down
4 changes: 2 additions & 2 deletions src/analyzer/helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pub(super) fn check_attr_duplicate_keys(attrs: &Vec<Attribute>, input: &str) ->

// TODO: handle multiple errs
if let Some(attr) = dup_keys.first() {
throw_err(Err::DuplicatedAttributeKey, &attr.span_range, input)?;
throw_err(Err::DuplicateAttributeKey, &attr.span_range, input)?;
}

Ok(())
Expand All @@ -28,7 +28,7 @@ pub(super) fn check_prop_duplicate_keys(attrs: &Vec<Property>, input: &str) -> A

// TODO: handle multiple errs
if let Some(prop) = dup_keys.first() {
throw_err(Err::DuplicatedPropertyKey, &prop.span_range, input)?;
throw_err(Err::DuplicatePropertyKey, &prop.span_range, input)?;
}

Ok(())
Expand Down
28 changes: 14 additions & 14 deletions src/analyzer/indexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ impl Indexer {
///
/// # Errors
///
/// - `DuplicatedTableName`
/// - `DuplicatedColumnName`
/// - `DuplicatedAlias`
/// - `DuplicateTableName`
/// - `DuplicateColumnName`
/// - `DuplicateAlias`
pub(super) fn index_table(&mut self, tables: &Vec<&TableBlock>, input: &str) -> AnalyzerResult<()> {
for table in tables {
let TableIdent {
Expand All @@ -50,13 +50,13 @@ impl Indexer {
.unwrap_or_else(|| DEFAULT_SCHEMA.to_string());

if self.contains_table(&schema, &name.to_string) {
throw_err(Err::DuplicatedTableName, span_range, input)?;
throw_err(Err::DuplicateTableName, span_range, input)?;
}

let mut indexed_cols = BTreeSet::new();
for col in table.cols.iter() {
match indexed_cols.get(&col.name.to_string) {
Some(_) => throw_err(Err::DuplicatedColumnName, &col.span_range, input)?,
Some(_) => throw_err(Err::DuplicateColumnName, &col.span_range, input)?,
None => indexed_cols.insert(col.name.to_string.clone()),
};
}
Expand All @@ -68,7 +68,7 @@ impl Indexer {
if let Some(alias) = alias {
match self.table_alias_map.get(&alias.to_string) {
Some(_) => {
throw_err(Err::DuplicatedAlias, &alias.span_range, input)?;
throw_err(Err::DuplicateAlias, &alias.span_range, input)?;
}
None => {
self
Expand Down Expand Up @@ -101,8 +101,8 @@ impl Indexer {
///
/// # Errors
///
/// - `DuplicatedEnumName`
/// - `DuplicatedEnumValue`
/// - `DuplicateEnumName`
/// - `DuplicateEnumValue`
pub(super) fn index_enums(&mut self, enums: &Vec<&EnumBlock>, input: &str) -> AnalyzerResult<()> {
for r#enum in enums.iter() {
let EnumIdent {
Expand All @@ -118,13 +118,13 @@ impl Indexer {
.unwrap_or_else(|| DEFAULT_SCHEMA.into());

if self.contains_enum(&schema, &name.to_string) {
throw_err(Err::DuplicatedEnumName, &span_range, input)?;
throw_err(Err::DuplicateEnumName, &span_range, input)?;
}

let mut value_sets = BTreeSet::new();
for value in r#enum.values.iter() {
match value_sets.get(&value.value.to_string) {
Some(_) => throw_err(Err::DuplicatedEnumValue, &value.span_range, input)?,
Some(_) => throw_err(Err::DuplicateEnumValue, &value.span_range, input)?,
None => value_sets.insert(value.value.to_string.clone()),
};
}
Expand All @@ -150,13 +150,13 @@ impl Indexer {
///
/// # Errors
///
/// - `DuplicatedTableGroupName`
/// - `DuplicateTableGroupName`
/// - `TableNotFound`
/// - `DuplicatedTableGroupItem`
/// - `DuplicateTableGroupItem`
pub(super) fn index_table_groups(&mut self, table_groups: &Vec<&TableGroupBlock>, input: &str) -> AnalyzerResult<()> {
for table_group in table_groups {
if self.table_group_map.get(&table_group.ident.to_string).is_some() {
throw_err(Err::DuplicatedTableGroupName, &table_group.ident.span_range, input)?;
throw_err(Err::DuplicateTableGroupName, &table_group.ident.span_range, input)?;
}

let mut indexed_items = BTreeSet::new();
Expand Down Expand Up @@ -184,7 +184,7 @@ impl Indexer {
};

match indexed_items.get(&ident) {
Some(_) => throw_err(Err::DuplicatedTableGroupItem, &group_item.span_range, input)?,
Some(_) => throw_err(Err::DuplicateTableGroupItem, &group_item.span_range, input)?,
None => indexed_items.insert(ident),
};
}
Expand Down
14 changes: 7 additions & 7 deletions src/analyzer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {

// check project block
if project.len() > 1 {
throw_err(Err::DuplicatedProjectSetting, &schema_block.span_range, input)?;
throw_err(Err::DuplicateProjectSetting, &schema_block.span_range, input)?;
}
match project.first() {
Some(project_block) => {
Expand All @@ -102,7 +102,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {
if let Some(settings) = &col.settings {
if settings.is_pk {
if !tmp_table_indexer.pk_list.is_empty() {
throw_err(Err::DuplicatedPrimaryKey, &col.span_range, input)?;
throw_err(Err::DuplicatePrimaryKey, &col.span_range, input)?;
}
if settings.nullable == Some(Nullable::Null) {
throw_err(Err::NullablePrimaryKey, &col.span_range, input)?;
Expand All @@ -129,7 +129,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {
.collect();

if filtered.len() == 2 {
throw_err(Err::ConflictedNullableSetting, &settings.span_range, input)?;
throw_err(Err::ConflictNullableSetting, &settings.span_range, input)?;
}
}

Expand Down Expand Up @@ -184,7 +184,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {

if settings.is_pk {
if !tmp_table_indexer.pk_list.is_empty() {
throw_err(Err::DuplicatedPrimaryKey, &def.span_range, input)?;
throw_err(Err::DuplicatePrimaryKey, &def.span_range, input)?;
}

tmp_table_indexer.pk_list.extend(ident_strings.clone())
Expand All @@ -194,7 +194,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {
.iter()
.any(|uniq_item| idents.iter().all(|id| uniq_item.contains(&id.to_string)))
{
throw_err(Err::DuplicatedUniqueKey, &def.span_range, input)?;
throw_err(Err::DuplicateUniqueKey, &def.span_range, input)?;
}

tmp_table_indexer
Expand All @@ -208,7 +208,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {
.iter()
.any(|(idx_item, idx_type)| idx_item == &ident_strings && idx_type == &settings.r#type)
{
throw_err(Err::DuplicatedIndexKey, &def.span_range, input)?;
throw_err(Err::DuplicateIndexKey, &def.span_range, input)?;
}

tmp_table_indexer
Expand All @@ -222,7 +222,7 @@ pub fn analyze(schema_block: &SchemaBlock) -> AnalyzerResult<AnalyzedIndexer> {
.iter()
.any(|(idx_item, _)| idx_item == &ident_strings)
{
throw_err(Err::DuplicatedIndexKey, &def.span_range, input)?;
throw_err(Err::DuplicateIndexKey, &def.span_range, input)?;
}

tmp_table_indexer.indexed_list.push((ident_strings, None))
Expand Down

0 comments on commit a944938

Please sign in to comment.