mirror of https://github.com/teloxide/teloxide.git

commit 2fdcd4153e
12 changed files with 719 additions and 688 deletions

@@ -6,6 +6,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## unreleased

### Removed

- Remove `derive(DialogueState)` macro

### Changed

- `#[command(rename = "a_name_that_is_not_a_case_name")]` doesn't work anymore

## 0.6.3 - 2022-07-19

### Fixed
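What the `rename` change means in user code, as a minimal sketch (assuming the 0.6-era `teloxide::utils::command::BotCommands` re-export; the enum is illustrative):

```rust
use teloxide::utils::command::BotCommands;

// Still accepted: `rename` names a case-conversion rule that is applied
// to every variant.
#[derive(BotCommands, Clone)]
#[command(rename = "lowercase", description = "Supported commands:")]
enum Command {
    #[command(description = "show this help message")]
    Help,
    #[command(description = "start the bot")]
    Start,
}

// No longer accepted: an arbitrary string is not a case rule, so a variant
// attribute like `#[command(rename = "show-help")]` is now rejected with an
// "invalid rename rule" compile error (see `src/rename_rules.rs` below).
```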
180 src/attr.rs
@@ -1,68 +1,156 @@
use crate::{error::compile_error_at, Result};

use proc_macro2::Span;
use syn::{
    parse::{Parse, ParseStream},
    LitStr, Token,
    parse::{Parse, ParseBuffer, ParseStream},
    spanned::Spanned,
    Attribute, Ident, Lit, Path, Token,
};

pub enum BotCommandAttribute {
    Prefix,
    Description,
    RenameRule,
    CustomParser,
    Separator,
pub(crate) fn fold_attrs<A, R>(
    attrs: &[Attribute],
    filter: fn(&Attribute) -> bool,
    parse: impl Fn(Attr) -> Result<R>,
    init: A,
    f: impl Fn(A, R) -> Result<A>,
) -> Result<A> {
    attrs
        .iter()
        .filter(|&a| filter(a))
        .flat_map(|attribute| {
            // FIXME: don't allocate here
            let attrs =
                match attribute.parse_args_with(|input: &ParseBuffer| {
                    input.parse_terminated::<_, Token![,]>(Attr::parse)
                }) {
                    Ok(ok) => ok,
                    Err(err) => return vec![Err(err.into())],
                };

            attrs.into_iter().map(&parse).collect()
        })
        .try_fold(init, |acc, r| r.and_then(|r| f(acc, r)))
}

impl Parse for BotCommandAttribute {
    fn parse(input: ParseStream) -> Result<Self, syn::Error> {
        let name_arg: syn::Ident = input.parse()?;
        match name_arg.to_string().as_str() {
            "prefix" => Ok(BotCommandAttribute::Prefix),
            "description" => Ok(BotCommandAttribute::Description),
            "rename" => Ok(BotCommandAttribute::RenameRule),
            "parse_with" => Ok(BotCommandAttribute::CustomParser),
            "separator" => Ok(BotCommandAttribute::Separator),
            _ => Err(syn::Error::new(name_arg.span(), "unexpected argument")),
        }
    }
/// An attribute key-value pair.
///
/// For example:
/// ```text
/// #[blahblah(key = "puff", value = 12, nope)]
///            ^^^^^^^^^^^^  ^^^^^^^^^^  ^^^^
/// ```
pub(crate) struct Attr {
    pub key: Ident,
    pub value: AttrValue,
}

pub struct Attr {
    name: BotCommandAttribute,
    value: String,
/// Value of an attribute.
///
/// For example:
/// ```text
/// #[blahblah(key = "puff", value = 12, nope)]
///                  ^^^^^^          ^^      ^-- (None pseudo-value)
/// ```
pub(crate) enum AttrValue {
    Path(Path),
    Lit(Lit),
    None(Span),
}

impl Parse for Attr {
    fn parse(input: ParseStream) -> Result<Self, syn::Error> {
        let name = input.parse::<BotCommandAttribute>()?;
        input.parse::<Token![=]>()?;
        let value = input.parse::<LitStr>()?.value();
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let key = input.parse::<Ident>()?;

        Ok(Self { name, value })
        let value = match input.peek(Token![=]) {
            true => {
                input.parse::<Token![=]>()?;
                input.parse::<AttrValue>()?
            }
            false => AttrValue::None(input.span()),
        };

        Ok(Self { key, value })
    }
}

impl Attr {
    pub fn name(&self) -> &BotCommandAttribute {
        &self.name
    }

    pub fn value(&self) -> String {
        self.value.clone()
    pub(crate) fn span(&self) -> Span {
        self.key
            .span()
            .join(self.value.span())
            .unwrap_or_else(|| self.key.span())
    }
}

pub struct VecAttrs {
    pub data: Vec<Attr>,
}
impl AttrValue {
    /// Unwraps this value if it's a string literal.
    pub fn expect_string(self) -> Result<String> {
        self.expect("a string", |this| match this {
            AttrValue::Lit(Lit::Str(s)) => Ok(s.value()),
            _ => Err(this),
        })
    }

impl Parse for VecAttrs {
    fn parse(input: ParseStream) -> Result<Self, syn::Error> {
        let mut data = vec![];
        while !input.is_empty() {
            data.push(input.parse()?);
            if !input.is_empty() {
                input.parse::<Token![,]>()?;
            }
    // /// Unwraps this value if it's a path.
    // pub fn expect_path(self) -> Result<Path> {
    //     self.expect("a path", |this| match this {
    //         AttrValue::Path(p) => Ok(p),
    //         _ => Err(this),
    //     })
    // }

    fn expect<T>(
        self,
        expected: &str,
        f: impl FnOnce(Self) -> Result<T, Self>,
    ) -> Result<T> {
        f(self).map_err(|this| {
            compile_error_at(
                &format!("expected {expected}, found {}", this.descr()),
                this.span(),
            )
        })
    }

    fn descr(&self) -> &'static str {
        use Lit::*;

        match self {
            Self::None(_) => "nothing",
            Self::Lit(l) => match l {
                Str(_) | ByteStr(_) => "a string",
                Char(_) => "a character",
                Byte(_) | Int(_) => "an integer",
                Float(_) => "a floating point integer",
                Bool(_) => "a boolean",
                Verbatim(_) => ":shrug:",
            },
            Self::Path(_) => "a path",
        }
    }

    /// Returns span of the value
    ///
    /// ```text
    /// #[blahblah(key = "puff", value = 12, nope )]
    ///                  ^^^^^^          ^^       ^
    /// ```
    fn span(&self) -> Span {
        match self {
            Self::Path(p) => p.span(),
            Self::Lit(l) => l.span(),
            Self::None(sp) => *sp,
        }
        Ok(Self { data })
    }
}

impl Parse for AttrValue {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let this = match input.peek(Lit) {
            true => Self::Lit(input.parse()?),
            false => Self::Path(input.parse()?),
        };

        Ok(this)
    }
}
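A standalone sketch of the key–value shape `Attr`/`AttrValue` accept (syn 1.x; this mirrors `Attr::parse` above but is trimmed to literals and the `None` pseudo-value, leaving out the path case):

```rust
use proc_macro2::Span;
use syn::{
    parse::{Parse, ParseStream},
    Ident, Lit, Token,
};

enum Value {
    Lit(Lit),
    None(Span),
}

struct KeyValue {
    key: Ident,
    value: Value,
}

impl Parse for KeyValue {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        // `key` alone, or `key = <literal>`.
        let key = input.parse::<Ident>()?;
        let value = if input.peek(Token![=]) {
            input.parse::<Token![=]>()?;
            Value::Lit(input.parse()?)
        } else {
            Value::None(input.span())
        };
        Ok(KeyValue { key, value })
    }
}

fn main() -> syn::Result<()> {
    let attr: KeyValue = syn::parse_str(r#"description = "does a thing""#)?;
    assert_eq!(attr.key.to_string(), "description");
    Ok(())
}
```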
148 src/bot_commands.rs (new file)
@@ -0,0 +1,148 @@
use crate::{
    command::Command, command_attr::CommandAttrs, command_enum::CommandEnum,
    compile_error, fields_parse::impl_parse_args, unzip::Unzip, Result,
};

use proc_macro2::TokenStream;
use quote::quote;
use syn::DeriveInput;

pub(crate) fn bot_commands_impl(input: DeriveInput) -> Result<TokenStream> {
    let data_enum = get_enum_data(&input)?;
    let enum_attrs = CommandAttrs::from_attributes(&input.attrs)?;
    let command_enum = CommandEnum::try_from(enum_attrs)?;

    let Unzip(var_init, var_info) = data_enum
        .variants
        .iter()
        .map(|variant| {
            let attrs = CommandAttrs::from_attributes(&variant.attrs)?;
            let command = Command::try_from(attrs, &variant.ident.to_string())?;

            let variant_name = &variant.ident;
            let self_variant = quote! { Self::#variant_name };

            let parser =
                command.parser.as_ref().unwrap_or(&command_enum.parser_type);
            let parse = impl_parse_args(&variant.fields, self_variant, parser);

            Ok((parse, command))
        })
        .collect::<Result<Unzip<Vec<_>, Vec<_>>>>()?;

    let type_name = &input.ident;
    let fn_descriptions = impl_descriptions(&var_info, &command_enum);
    let fn_parse = impl_parse(&var_info, &command_enum, &var_init);
    let fn_commands = impl_commands(&var_info, &command_enum);

    let trait_impl = quote! {
        impl BotCommands for #type_name {
            #fn_descriptions
            #fn_parse
            #fn_commands
        }
    };

    Ok(trait_impl)
}

fn impl_commands(
    infos: &[Command],
    global: &CommandEnum,
) -> proc_macro2::TokenStream {
    let commands = infos
        .iter()
        .filter(|command| command.description_is_enabled())
        .map(|command| {
            let c = command.get_matched_value(global);
            let d = command.description.as_deref().unwrap_or_default();
            quote! { BotCommand::new(#c,#d) }
        });

    quote! {
        fn bot_commands() -> Vec<teloxide::types::BotCommand> {
            use teloxide::types::BotCommand;
            vec![#(#commands),*]
        }
    }
}

fn impl_descriptions(
    infos: &[Command],
    global: &CommandEnum,
) -> proc_macro2::TokenStream {
    let command_descriptions = infos
        .iter()
        .filter(|command| command.description_is_enabled())
        .map(|c| {
            let (prefix, command) = c.get_matched_value2(global);
            let description = c.description.clone().unwrap_or_default();
            quote! { CommandDescription { prefix: #prefix, command: #command, description: #description } }
        });

    let global_description = match global.description.as_deref() {
        Some(gd) => quote! { .global_description(#gd) },
        None => quote! {},
    };

    quote! {
        fn descriptions() -> teloxide::utils::command::CommandDescriptions<'static> {
            use teloxide::utils::command::{CommandDescriptions, CommandDescription};
            use std::borrow::Cow;

            CommandDescriptions::new(&[
                #(#command_descriptions),*
            ])
            #global_description
        }
    }
}

fn impl_parse(
    infos: &[Command],
    global: &CommandEnum,
    variants_initialization: &[proc_macro2::TokenStream],
) -> proc_macro2::TokenStream {
    let matching_values = infos.iter().map(|c| c.get_matched_value(global));

    quote! {
        fn parse<N>(s: &str, bot_name: N) -> Result<Self, teloxide::utils::command::ParseError>
        where
            N: Into<String>
        {
            // FIXME: we should probably just call a helper function from `teloxide`, instead of parsing command syntax ourselves
            use std::str::FromStr;
            use teloxide::utils::command::ParseError;

            // 2 is used to only split once (=> in two parts),
            // we only need to split the command and the rest of arguments.
            let mut words = s.splitn(2, ' ');

            // Unwrap: split iterators always have at least one item
            let mut full_command = words.next().unwrap().split('@');
            let command = full_command.next().unwrap();

            let bot_username = full_command.next();
            match bot_username {
                None => {}
                Some(username) if username.eq_ignore_ascii_case(&bot_name.into()) => {}
                Some(n) => return Err(ParseError::WrongBotName(n.to_owned())),
            }

            let args = words.next().unwrap_or("").to_owned();
            match command {
                #(
                    #matching_values => Ok(#variants_initialization),
                )*
                _ => Err(ParseError::UnknownCommand(command.to_owned())),
            }
        }
    }
}

fn get_enum_data(input: &DeriveInput) -> Result<&syn::DataEnum> {
    match &input.data {
        syn::Data::Enum(data) => Ok(data),
        _ => Err(compile_error("`BotCommands` is only allowed for enums")),
    }
}
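For orientation, this is the kind of user code the generated impl serves (a sketch assuming the usual teloxide re-exports of this era; `MyCommand` and its variants are illustrative):

```rust
use teloxide::utils::command::BotCommands;

#[derive(BotCommands, Clone)]
#[command(rename = "lowercase", description = "These commands are supported:")]
enum MyCommand {
    #[command(description = "display this text.")]
    Help,
    #[command(description = "handle a username and an age.", parse_with = "split")]
    UsernameAndAge { username: String, age: u8 },
}

fn main() {
    // `parse` is the generated method shown above: it splits off the command,
    // checks an optional `@bot_name`, then parses the arguments.
    let cmd = MyCommand::parse("/usernameandage sergey 26", "my_bot").unwrap();
    assert!(matches!(cmd, MyCommand::UsernameAndAge { age: 26, .. }));

    // `bot_commands()` lists the commands, e.g. for `set_my_commands`.
    let list = MyCommand::bot_commands();
    assert_eq!(list.len(), 2);
}
```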
@@ -1,33 +1,28 @@
use crate::{
    attr::{Attr, BotCommandAttribute},
    command_enum::CommandEnum,
    fields_parse::ParserType,
    rename_rules::rename_by_rule,
    command_attr::CommandAttrs, command_enum::CommandEnum,
    fields_parse::ParserType, rename_rules::RenameRule, Result,
};

pub struct Command {
pub(crate) struct Command {
    pub prefix: Option<String>,
    pub description: Option<String>,
    pub parser: Option<ParserType>,
    pub name: String,
    pub renamed: bool,
}

impl Command {
    pub fn try_from(attrs: &[Attr], name: &str) -> Result<Self, String> {
        let attrs = parse_attrs(attrs)?;
        let mut new_name = name.to_string();
        let mut renamed = false;
    pub fn try_from(attrs: CommandAttrs, name: &str) -> Result<Self> {
        let CommandAttrs {
            prefix,
            description,
            rename_rule,
            parser,
            separator: _,
        } = attrs;

        let prefix = attrs.prefix;
        let description = attrs.description;
        let rename = attrs.rename;
        let parser = attrs.parser;
        if let Some(rename_rule) = rename {
            new_name = rename_by_rule(name, &rename_rule);
            renamed = true;
        }
        Ok(Self { prefix, description, parser, name: new_name, renamed })
        let name = rename_rule.unwrap_or(RenameRule::Identity).apply(name);

        Ok(Self { prefix, description, parser, name })
    }

    pub fn get_matched_value(&self, global_parameters: &CommandEnum) -> String {

@@ -38,11 +33,8 @@ impl Command {
        } else {
            "/"
        };
        if let Some(rule) = &global_parameters.rename_rule {
            String::from(prefix) + &rename_by_rule(&self.name, rule.as_str())
        } else {
            String::from(prefix) + &self.name
        }

        String::from(prefix) + &global_parameters.rename_rule.apply(&self.name)
    }

    pub fn get_matched_value2(

@@ -56,48 +48,11 @@ impl Command {
        } else {
            "/"
        };
        if let Some(rule) = &global_parameters.rename_rule {
            (String::from(prefix), rename_by_rule(&self.name, rule.as_str()))
        } else {
            (String::from(prefix), self.name.clone())
        }
    }
}

pub struct CommandAttrs {
    pub(crate) prefix: Option<String>,
    pub(crate) description: Option<String>,
    pub(crate) rename: Option<String>,
    pub(crate) parser: Option<ParserType>,
    pub(crate) separator: Option<String>,
}

pub fn parse_attrs(attrs: &[Attr]) -> Result<CommandAttrs, String> {
    let mut prefix = None;
    let mut description = None;
    let mut rename_rule = None;
    let mut parser = None;
    let mut separator = None;

    for attr in attrs {
        match attr.name() {
            BotCommandAttribute::Prefix => prefix = Some(attr.value()),
            BotCommandAttribute::Description => {
                description = Some(attr.value())
            }
            BotCommandAttribute::RenameRule => rename_rule = Some(attr.value()),
            BotCommandAttribute::CustomParser => {
                parser = Some(ParserType::parse(&attr.value()))
            }
            BotCommandAttribute::Separator => separator = Some(attr.value()),
        }
        (String::from(prefix), global_parameters.rename_rule.apply(&self.name))
    }

    Ok(CommandAttrs {
        prefix,
        description,
        rename: rename_rule,
        parser,
        separator,
    })
    pub(crate) fn description_is_enabled(&self) -> bool {
        self.description != Some("off".to_owned())
    }
}
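`description_is_enabled` is what implements the existing `description = "off"` convention: such a command still parses, but is hidden from `descriptions()` and `bot_commands()`. A minimal sketch (illustrative enum, assuming the teloxide re-export):

```rust
use teloxide::utils::command::BotCommands;

#[derive(BotCommands, Clone)]
#[command(rename = "lowercase")]
enum Command {
    // Parseable as "/start", but omitted from the generated command list.
    #[command(description = "off")]
    Start,
    #[command(description = "show help")]
    Help,
}
```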
115 src/command_attr.rs (new file)
@@ -0,0 +1,115 @@
use crate::{
    attr::{fold_attrs, Attr},
    error::compile_error_at,
    fields_parse::ParserType,
    rename_rules::RenameRule,
    Result,
};

use proc_macro2::Span;
use syn::Attribute;

/// Attributes for `BotCommands` derive macro.
pub(crate) struct CommandAttrs {
    pub prefix: Option<String>,
    pub description: Option<String>,
    pub rename_rule: Option<RenameRule>,
    pub parser: Option<ParserType>,
    pub separator: Option<String>,
}

/// An attribute for `BotCommands` derive macro.
pub(crate) struct CommandAttr {
    kind: CommandAttrKind,
    sp: Span,
}

pub(crate) enum CommandAttrKind {
    Prefix(String),
    Description(String),
    Rename(RenameRule),
    ParseWith(ParserType),
    Separator(String),
}

impl CommandAttrs {
    pub fn from_attributes(attributes: &[Attribute]) -> Result<Self> {
        use CommandAttrKind::*;

        fold_attrs(
            attributes,
            is_command_attribute,
            CommandAttr::parse,
            Self {
                prefix: None,
                description: None,
                rename_rule: None,
                parser: None,
                separator: None,
            },
            |mut this, attr| {
                fn insert<T>(
                    opt: &mut Option<T>,
                    x: T,
                    sp: Span,
                ) -> Result<()> {
                    match opt {
                        slot @ None => {
                            *slot = Some(x);
                            Ok(())
                        }
                        Some(_) => {
                            Err(compile_error_at("duplicate attribute", sp))
                        }
                    }
                }

                match attr.kind {
                    Prefix(p) => insert(&mut this.prefix, p, attr.sp),
                    Description(d) => insert(&mut this.description, d, attr.sp),
                    Rename(r) => insert(&mut this.rename_rule, r, attr.sp),
                    ParseWith(p) => insert(&mut this.parser, p, attr.sp),
                    Separator(s) => insert(&mut this.separator, s, attr.sp),
                }?;

                Ok(this)
            },
        )
    }
}

impl CommandAttr {
    fn parse(attr: Attr) -> Result<Self> {
        use CommandAttrKind::*;

        let sp = attr.span();
        let Attr { key, value } = attr;
        let kind = match &*key.to_string() {
            "prefix" => Prefix(value.expect_string()?),
            "description" => Description(value.expect_string()?),
            "rename" => Rename(
                value.expect_string().and_then(|r| RenameRule::parse(&r))?,
            ),
            "parse_with" => {
                ParseWith(value.expect_string().map(|p| ParserType::parse(&p))?)
            }
            "separator" => Separator(value.expect_string()?),
            _ => {
                return Err(compile_error_at(
                    "unexpected attribute name (expected one of `prefix`, \
                     `description`, `rename`, `parse_with` and `separator`",
                    key.span(),
                ))
            }
        };

        Ok(Self { kind, sp })
    }
}

fn is_command_attribute(a: &Attribute) -> bool {
    match a.path.get_ident() {
        Some(ident) => ident == "command",
        _ => false,
    }
}
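One behavioural consequence of `fold_attrs` plus the `insert` helper above: repeating the same key, even across separate `#[command(...)]` attributes, is reported at the second occurrence. A sketch that intentionally fails to compile (illustrative enum):

```rust
use teloxide::utils::command::BotCommands;

// error: duplicate attribute
#[derive(BotCommands, Clone)]
#[command(rename = "lowercase")]
#[command(rename = "snake_case")] // <- rejected by `insert`, spanned here
enum Command {
    Help,
}
```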
@@ -1,44 +1,37 @@
use crate::{attr::Attr, command::parse_attrs, fields_parse::ParserType};
use crate::{
    command_attr::CommandAttrs, fields_parse::ParserType,
    rename_rules::RenameRule, Result,
};

#[derive(Debug)]
pub struct CommandEnum {
pub(crate) struct CommandEnum {
    pub prefix: Option<String>,
    pub description: Option<String>,
    pub rename_rule: Option<String>,
    pub rename_rule: RenameRule,
    pub parser_type: ParserType,
}

impl CommandEnum {
    pub fn try_from(attrs: &[Attr]) -> Result<Self, String> {
        let attrs = parse_attrs(attrs)?;
    pub fn try_from(attrs: CommandAttrs) -> Result<Self> {
        let CommandAttrs {
            prefix,
            description,
            rename_rule,
            parser,
            separator,
        } = attrs;
        let mut parser = parser.unwrap_or(ParserType::Default);

        let prefix = attrs.prefix;
        let description = attrs.description;
        let rename = attrs.rename;
        let separator = attrs.separator;
        let mut parser = attrs.parser.unwrap_or(ParserType::Default);
        // FIXME: Error on unused separator
        if let (ParserType::Split { separator }, Some(s)) =
            (&mut parser, &separator)
        {
            *separator = Some(s.clone())
        }
        if let Some(rename_rule) = &rename {
            match rename_rule.as_str() {
                "lowercase"
                | "UPPERCASE"
                | "PascalCase"
                | "camelCase"
                | "snake_case"
                | "SCREAMING_SNAKE_CASE"
                | "kebab-case"
                | "SCREAMING-KEBAB-CASE" => {}
                _ => return Err("disallowed value".to_owned()),
            }
        }
        Ok(Self {
            prefix,
            description,
            rename_rule: rename,
            rename_rule: rename_rule.unwrap_or(RenameRule::Identity),
            parser_type: parser,
        })
    }
@@ -1,229 +0,0 @@
use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote, ToTokens};
use syn::{
    parse::{Parse, ParseStream},
    spanned::Spanned,
    Fields, FieldsNamed, GenericParam, ItemEnum, Path, Type,
};

pub fn expand(item: ItemEnum) -> Result<TokenStream, syn::Error> {
    let enum_ident = &item.ident;
    let self_params_with_bounds = {
        let params = &item.generics.params;
        if !params.is_empty() {
            quote! { < #params > }
        } else {
            quote! {}
        }
    };
    let self_params = {
        let params = &item.generics.params;
        if !params.is_empty() {
            let mut params = quote! { < };
            item.generics.params.iter().for_each(|param| match param {
                GenericParam::Type(ty) => {
                    let ident = &ty.ident;
                    params.extend(quote! { #ident, });
                }
                GenericParam::Lifetime(li) => {
                    let li = &li.lifetime;
                    params.extend(quote! { #li, })
                }
                GenericParam::Const(_par) => todo!(),
            });
            params.extend(quote! { > });
            params
        } else {
            quote! {}
        }
    };
    let where_clause = match item.generics.where_clause.clone() {
        Some(mut clause) => {
            let predicate = quote! { Self: Clone + Send + Sync + 'static };
            clause.predicates.push(syn::parse2(predicate).unwrap());
            Some(clause)
        }
        x => x,
    };
    let out = parse_out_type(item.ident.span(), &item.attrs)?;

    let mut branches = quote! {};
    for variant in item.variants.iter() {
        let handler = {
            let handler_attr = variant
                .attrs
                .iter()
                .find(|attr| attr.path.is_ident("handler"))
                .ok_or_else(|| {
                    syn::Error::new(
                        variant.span(),
                        "Expected `handler` attribute.",
                    )
                })?;
            handler_attr.parse_args::<HandlerAttr>()?
        };

        branches.extend(match &variant.fields {
            Fields::Named(fields) => create_branch_multiple_fields_named(
                enum_ident,
                &self_params,
                &variant.ident,
                &handler.func,
                fields,
            ),
            Fields::Unnamed(fields) => match fields.unnamed.len() {
                1 => create_branch_one_field(
                    enum_ident,
                    &self_params,
                    &variant.ident,
                    &handler.func,
                ),
                len => create_branch_multiple_fields(
                    enum_ident,
                    &self_params,
                    &variant.ident,
                    &handler.func,
                    len,
                ),
            },
            Fields::Unit => create_branch_no_fields(
                enum_ident,
                &self_params,
                &variant.ident,
                &handler.func,
            ),
        });
    }

    Ok(quote! {const _: () = {
        fn assert_clone<T: Clone>() {}

        use teloxide::dptree;
        use teloxide::dispatching::dialogue::Dialogue;

        impl #self_params_with_bounds teloxide::dispatching::HandlerFactory for #enum_ident #self_params #where_clause {
            type Out = #out;

            fn handler() -> dptree::Handler<'static, dptree::di::DependencyMap, Self::Out,
                teloxide::dispatching::DpHandlerDescription> {
                assert_clone::<#enum_ident #self_params>();

                dptree::entry()
                #branches
            }
        }
    };})
}

fn create_branch_no_fields(
    state: &Ident,
    state_generics: impl ToTokens,
    kind: &Ident,
    handler: &Path,
) -> TokenStream {
    quote! {
        .branch(
            dptree::filter(|state: #state #state_generics| {
                match state { #state::#kind => true, _ => false }
            }).endpoint(#handler)
        )
    }
}

fn create_branch_one_field(
    state: &Ident,
    state_generics: impl ToTokens,
    kind: &Ident,
    handler: &Path,
) -> TokenStream {
    quote! {
        .branch(
            dptree::filter_map(|state: #state #state_generics| {
                match state { #state::#kind(arg) => Some(arg), _ => None }
            }).endpoint(#handler)
        )
    }
}

fn create_branch_multiple_fields(
    state: &Ident,
    state_generics: impl ToTokens,
    kind: &Ident,
    handler: &Path,
    fields_count: usize,
) -> TokenStream {
    let fields = gen_variant_field_names(fields_count);

    quote! {
        .branch(
            dptree::filter_map(|state: #state #state_generics| {
                match state { #state::#kind(#fields) => Some((#fields)), _ => None }
            }).endpoint(#handler)
        )
    }
}

fn gen_variant_field_names(len: usize) -> TokenStream {
    let mut fields = quote! {};

    for i in 0..len {
        let idx = format_ident!("_{}", i);
        fields.extend(quote! { #idx, });
    }

    fields
}

fn create_branch_multiple_fields_named(
    state: &Ident,
    state_generics: impl ToTokens,
    kind: &Ident,
    handler: &Path,
    fields_named: &FieldsNamed,
) -> TokenStream {
    let mut fields = quote! {};

    for field in fields_named.named.iter() {
        let ident =
            field.ident.as_ref().expect("Named fields must have identifiers");
        fields.extend(quote! { #ident, });
    }

    quote! {
        .branch(
            dptree::filter_map(|state: #state #state_generics| {
                match state { #state::#kind { #fields } => Some((#fields)), _ => None }
            }).endpoint(#handler)
        )
    }
}

fn parse_out_type(
    span: Span,
    attrs: &[syn::Attribute],
) -> Result<Type, syn::Error> {
    let mut out = None;
    for x in attrs {
        if x.path.is_ident("handler_out") {
            out = Some(x.parse_args::<Type>()?);
        }
    }
    if let Some(out) = out {
        return Ok(out);
    }
    Err(syn::Error::new(
        span,
        "You must specify #[handler_out()] argument in which declare output \
         type of handlers. For example, #[handler_out(Result<(), Error>)]",
    ))
}

pub struct HandlerAttr {
    func: Path,
}

impl Parse for HandlerAttr {
    fn parse(input: ParseStream) -> Result<Self, syn::Error> {
        Ok(Self { func: input.parse::<Path>()? })
    }
}
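For reference, the usage shape this deleted file supported, which stops compiling once the derive is removed (illustrative state enum and handler names):

```rust
// No longer available after this commit:
//
// #[derive(DialogueState, Clone)]
// #[handler_out(Result<(), Error>)]
// enum State {
//     #[handler(handle_start)]
//     Start,
//     #[handler(handle_receive_age)]
//     ReceiveAge(u8),
// }
//
// The removed derive expanded each variant into a `dptree` branch
// (`filter`/`filter_map` + `endpoint`, see `create_branch_*` above); the
// deprecation note in src/lib.rs pointed users to `teloxide::handler!` as
// the replacement.
```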
56 src/error.rs (new file)
@@ -0,0 +1,56 @@
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};

pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;

#[derive(Debug)]
pub(crate) struct Error(TokenStream);

pub(crate) fn compile_error<T>(data: T) -> Error
where
    T: ToTokens,
{
    Error(quote! { compile_error! { #data } })
}

pub(crate) fn compile_error_at(msg: &str, sp: Span) -> Error {
    use proc_macro2::{
        Delimiter, Group, Ident, Literal, Punct, Spacing, TokenTree,
    };
    use std::iter::FromIterator;

    // compile_error! { $msg }
    let ts = TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("compile_error", sp)),
        TokenTree::Punct({
            let mut punct = Punct::new('!', Spacing::Alone);
            punct.set_span(sp);
            punct
        }),
        TokenTree::Group({
            let mut group = Group::new(Delimiter::Brace, {
                TokenStream::from_iter(vec![TokenTree::Literal({
                    let mut string = Literal::string(msg);
                    string.set_span(sp);
                    string
                })])
            });
            group.set_span(sp);
            group
        }),
    ]);

    Error(ts)
}

impl From<Error> for proc_macro2::TokenStream {
    fn from(Error(e): Error) -> Self {
        e
    }
}

impl From<syn::Error> for Error {
    fn from(e: syn::Error) -> Self {
        Self(e.to_compile_error())
    }
}
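A minimal standalone sketch of the same error-reporting pattern (not the crate's code): represent errors as ready-made `compile_error!` token streams and hand them back to the compiler instead of panicking inside the proc macro.

```rust
use proc_macro2::TokenStream;
use quote::quote;

struct Error(TokenStream);

fn fail(msg: &str) -> Error {
    // `#msg` interpolates the &str as a string literal.
    Error(quote! { compile_error! { #msg } })
}

fn expand(ok: bool) -> Result<TokenStream, Error> {
    if ok {
        Ok(quote! { struct Generated; })
    } else {
        Err(fail("`BotCommands` is only allowed for enums"))
    }
}

fn main() {
    // On failure the emitted tokens are a compile error, not a panic.
    let tokens = expand(false).unwrap_or_else(|Error(e)| e);
    assert!(tokens.to_string().contains("compile_error"));
}
```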
@@ -1,16 +1,15 @@
extern crate quote;

use quote::{quote, ToTokens};
use syn::{FieldsNamed, FieldsUnnamed, Type};
use quote::quote;
use syn::{Fields, FieldsNamed, FieldsUnnamed, Type};

#[derive(Debug)]
pub enum ParserType {
pub(crate) enum ParserType {
    Default,
    Split { separator: Option<String> },
    Custom(String),
}

impl ParserType {
    // FIXME: use path for custom
    pub fn parse(data: &str) -> Self {
        match data {
            "default" => ParserType::Default,

@@ -20,16 +19,29 @@ impl ParserType {
    }
}

pub fn impl_parse_args_unnamed(
pub(crate) fn impl_parse_args(
    fields: &Fields,
    self_variant: proc_macro2::TokenStream,
    parser: &ParserType,
) -> proc_macro2::TokenStream {
    match fields {
        Fields::Unit => self_variant,
        Fields::Unnamed(fields) => {
            impl_parse_args_unnamed(fields, self_variant, parser)
        }
        Fields::Named(named) => {
            impl_parse_args_named(named, self_variant, parser)
        }
    }
}

pub(crate) fn impl_parse_args_unnamed(
    data: &FieldsUnnamed,
    variant: impl ToTokens,
    variant: proc_macro2::TokenStream,
    parser_type: &ParserType,
) -> quote::__private::TokenStream {
    let get_arguments = create_parser(
        parser_type,
        data.unnamed.iter().map(|f| &f.ty),
        data.unnamed.len(),
    );
) -> proc_macro2::TokenStream {
    let get_arguments =
        create_parser(parser_type, data.unnamed.iter().map(|f| &f.ty));
    let iter = (0..data.unnamed.len()).map(syn::Index::from);
    let mut initialization = quote! {};
    for i in iter {

@@ -44,17 +56,14 @@ pub fn impl_parse_args_unnamed(
    res
}

pub fn impl_parse_args_named(
pub(crate) fn impl_parse_args_named(
    data: &FieldsNamed,
    variant: impl ToTokens,
    variant: proc_macro2::TokenStream,
    parser_type: &ParserType,
) -> quote::__private::TokenStream {
    let get_arguments = create_parser(
        parser_type,
        data.named.iter().map(|f| &f.ty),
        data.named.len(),
    );
    let i = (0..data.named.len()).map(syn::Index::from);
) -> proc_macro2::TokenStream {
    let get_arguments =
        create_parser(parser_type, data.named.iter().map(|f| &f.ty));
    let i = (0..).map(syn::Index::from);
    let name = data.named.iter().map(|f| f.ident.as_ref().unwrap());
    let res = quote! {
        {

@@ -67,26 +76,30 @@ pub fn impl_parse_args_named(

fn create_parser<'a>(
    parser_type: &ParserType,
    mut types: impl Iterator<Item = &'a Type>,
    count_args: usize,
) -> quote::__private::TokenStream {
    mut types: impl ExactSizeIterator<Item = &'a Type>,
) -> proc_macro2::TokenStream {
    let function_to_parse = match parser_type {
        ParserType::Default => match count_args {
        ParserType::Default => match types.len() {
            1 => {
                let ty = types.next().expect("count_args != types.len()");
                quote! { (|s: String| {
                    let res = <#ty>::from_str(&s)
                        .map_err(|e|ParseError::IncorrectFormat({ let e: Box<dyn std::error::Error + Send + Sync + 'static> = e.into(); e }))?;
                    Ok((res, ))
                })
                let ty = types.next().unwrap();
                quote! {
                    (
                        |s: String| {
                            let res = <#ty>::from_str(&s)
                                .map_err(|e| ParseError::IncorrectFormat(e.into()))?;

                            Ok((res,))
                        }
                    )
                }
            }
            _ => quote! { compile_error!("Expected exactly 1 argument") },
            _ => {
                quote! { compile_error!("Default parser works only with exactly 1 field") }
            }
        },
        ParserType::Split { separator } => parser_with_separator(
            &separator.clone().unwrap_or_else(|| " ".to_owned()),
            types,
            count_args,
        ),
        ParserType::Custom(s) => {
            let path = syn::parse_str::<syn::Path>(s).unwrap_or_else(|_| {

@@ -95,6 +108,7 @@ fn create_parser<'a>(
            quote! { #path }
        }
    };

    quote! {
        let arguments = #function_to_parse(args)?;
    }

@@ -102,31 +116,46 @@ fn create_parser<'a>(

fn parser_with_separator<'a>(
    separator: &str,
    types: impl Iterator<Item = &'a Type>,
    count_args: usize,
) -> quote::__private::TokenStream {
    let inner = quote! { let mut splited = s.split(#separator); };
    let i = 0..count_args;
    let inner2 = quote! {
        #(<#types>::from_str(splited.next().ok_or(ParseError::TooFewArguments {
            expected: #count_args,
            found: #i,
            message: format!("Expected but not found arg number {}", #i + 1),
        })?).map_err(|e|ParseError::IncorrectFormat({ let e: Box<dyn std::error::Error + Send + Sync + 'static> = e.into(); e }))?,)*
    types: impl ExactSizeIterator<Item = &'a Type>,
) -> proc_macro2::TokenStream {
    let expected = types.len();
    let res = {
        let found = 0usize..;
        quote! {
            (
                #(
                    {
                        let s = splitted.next().ok_or(ParseError::TooFewArguments {
                            expected: #expected,
                            found: #found,
                            message: format!("Expected but not found arg number {}", #found + 1),
                        })?;

                        <#types>::from_str(s).map_err(|e| ParseError::IncorrectFormat(e.into()))?
                    }
                ),*
            )
        }
    };

    let res = quote! {
        (|s: String| {
            #inner
            let res = (#inner2);
            match splited.next() {
                Some(d) => Err(ParseError::TooManyArguments {
                    expected: #count_args,
                    found: #count_args + 1,
                    message: format!("Excess argument: {}", d),
                }),
                None => Ok(res)
        (
            |s: String| {
                let mut splitted = s.split(#separator);

                let res = #res;

                match splitted.next() {
                    Some(d) => Err(ParseError::TooManyArguments {
                        expected: #expected,
                        found: #expected + 1,
                        message: format!("Excess argument: {}", d),
                    }),
                    None => Ok(res)
                }
            }
        })
        )
    };

    res
}
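Outside of the macro, the code generated by `parser_with_separator` boils down to the following runtime behaviour. A plain-Rust sketch for a two-field `(String, u8)` command with the default `" "` separator; `ParseErr` here is a stand-in for `teloxide::utils::command::ParseError`:

```rust
use std::str::FromStr;

#[derive(Debug)]
enum ParseErr {
    TooFew { expected: usize, found: usize },
    TooMany { expected: usize, found: usize },
    IncorrectFormat(String),
}

// What the generated closure does for `parse_with = "split"` on (String, u8).
fn parse_split(s: String) -> Result<(String, u8), ParseErr> {
    let mut splitted = s.split(' ');

    let a = splitted
        .next()
        .ok_or(ParseErr::TooFew { expected: 2, found: 0 })?
        .to_owned();
    let b = <u8>::from_str(
        splitted.next().ok_or(ParseErr::TooFew { expected: 2, found: 1 })?,
    )
    .map_err(|e| ParseErr::IncorrectFormat(e.to_string()))?;

    // Any leftover token is an excess argument.
    match splitted.next() {
        Some(_) => Err(ParseErr::TooMany { expected: 2, found: 3 }),
        None => Ok((a, b)),
    }
}

fn main() {
    assert!(parse_split("sergey 26".to_owned()).is_ok());
    assert!(matches!(parse_split("sergey".to_owned()), Err(ParseErr::TooFew { .. })));
    assert!(matches!(parse_split("a 1 extra".to_owned()), Err(ParseErr::TooMany { .. })));
}
```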
228 src/lib.rs
@@ -1,230 +1,24 @@
// TODO: refactor this shit.
extern crate proc_macro;

mod attr;
mod bot_commands;
mod command;
mod command_attr;
mod command_enum;
mod dialogue_state;
mod error;
mod fields_parse;
mod rename_rules;
mod unzip;

extern crate proc_macro;
extern crate quote;
extern crate syn;
use crate::{
    attr::{Attr, VecAttrs},
    command::Command,
    command_enum::CommandEnum,
    fields_parse::{impl_parse_args_named, impl_parse_args_unnamed},
};
pub(crate) use error::{compile_error, Result};
use syn::{parse_macro_input, DeriveInput};

use crate::bot_commands::bot_commands_impl;
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::{parse_macro_input, DeriveInput, Fields, ItemEnum};

#[proc_macro_derive(DialogueState, attributes(handler, handler_out, store))]
#[deprecated(note = "Use teloxide::handler! instead")]
pub fn derive_dialogue_state(item: TokenStream) -> TokenStream {
    let input = parse_macro_input!(item as ItemEnum);
    match dialogue_state::expand(input) {
        Ok(s) => s.into(),
        Err(e) => e.to_compile_error().into(),
    }
}

macro_rules! get_or_return {
    ($($some:tt)*) => {
        match $($some)* {
            Ok(elem) => elem,
            Err(e) => return e
        }
    }
}

#[proc_macro_derive(BotCommands, attributes(command))]
pub fn derive_telegram_command_enum(tokens: TokenStream) -> TokenStream {
pub fn bot_commands_derive(tokens: TokenStream) -> TokenStream {
    let input = parse_macro_input!(tokens as DeriveInput);

    let data_enum: &syn::DataEnum = get_or_return!(get_enum_data(&input));

    let enum_attrs: Vec<Attr> = get_or_return!(parse_attributes(&input.attrs));

    let command_enum = match CommandEnum::try_from(enum_attrs.as_slice()) {
        Ok(command_enum) => command_enum,
        Err(e) => return compile_error(e),
    };

    let variants: Vec<&syn::Variant> = data_enum.variants.iter().collect();

    let mut variant_infos = vec![];
    for variant in variants.iter() {
        let mut attrs = Vec::new();
        for attr in &variant.attrs {
            match attr.parse_args::<VecAttrs>() {
                Ok(mut attrs_) => {
                    attrs.append(attrs_.data.as_mut());
                }
                Err(e) => {
                    return compile_error(e.to_compile_error());
                }
            }
        }
        match Command::try_from(attrs.as_slice(), &variant.ident.to_string()) {
            Ok(command) => variant_infos.push(command),
            Err(e) => return compile_error(e),
        }
    }

    let mut vec_impl_create = vec![];
    for (variant, info) in variants.iter().zip(variant_infos.iter()) {
        let var = &variant.ident;
        let variantt = quote! { Self::#var };
        match &variant.fields {
            Fields::Unnamed(fields) => {
                let parser =
                    info.parser.as_ref().unwrap_or(&command_enum.parser_type);
                vec_impl_create
                    .push(impl_parse_args_unnamed(fields, variantt, parser));
            }
            Fields::Unit => {
                vec_impl_create.push(variantt);
            }
            Fields::Named(named) => {
                let parser =
                    info.parser.as_ref().unwrap_or(&command_enum.parser_type);
                vec_impl_create
                    .push(impl_parse_args_named(named, variantt, parser));
            }
        }
    }

    let ident = &input.ident;

    let fn_descriptions = impl_descriptions(&variant_infos, &command_enum);
    let fn_parse = impl_parse(&variant_infos, &command_enum, &vec_impl_create);
    let fn_commands = impl_commands(&variant_infos, &command_enum);

    let trait_impl = quote! {
        impl BotCommands for #ident {
            #fn_descriptions
            #fn_parse
            #fn_commands
        }
    };

    TokenStream::from(trait_impl)
}

fn impl_commands(
    infos: &[Command],
    global: &CommandEnum,
) -> quote::__private::TokenStream {
    let commands_to_list = infos.iter().filter_map(|command| {
        if command.description == Some("off".into()) {
            None
        } else {
            let c = command.get_matched_value(global);
            let d = command.description.as_deref().unwrap_or_default();
            Some(quote! { BotCommand::new(#c,#d) })
        }
    });
    quote! {
        fn bot_commands() -> Vec<teloxide::types::BotCommand> {
            use teloxide::types::BotCommand;
            vec![#(#commands_to_list),*]
        }
    }
}

fn impl_descriptions(
    infos: &[Command],
    global: &CommandEnum,
) -> quote::__private::TokenStream {
    let command_descriptions = infos.iter().filter_map(|c| {
        let (prefix, command) = c.get_matched_value2(global);
        let description = c.description.clone().unwrap_or_default();
        (description != "off").then(|| quote! { CommandDescription { prefix: #prefix, command: #command, description: #description } })
    });

    let global_description = match global.description.as_deref() {
        Some(gd) => quote! { .global_description(#gd) },
        None => quote! {},
    };

    quote! {
        fn descriptions() -> teloxide::utils::command::CommandDescriptions<'static> {
            use teloxide::utils::command::{CommandDescriptions, CommandDescription};
            use std::borrow::Cow;

            CommandDescriptions::new(&[
                #(#command_descriptions),*
            ])
            #global_description
        }
    }
}

fn impl_parse(
    infos: &[Command],
    global: &CommandEnum,
    variants_initialization: &[quote::__private::TokenStream],
) -> quote::__private::TokenStream {
    let matching_values = infos.iter().map(|c| c.get_matched_value(global));

    quote! {
        fn parse<N>(s: &str, bot_name: N) -> Result<Self, teloxide::utils::command::ParseError>
        where
            N: Into<String>
        {
            use std::str::FromStr;
            use teloxide::utils::command::ParseError;

            let mut words = s.splitn(2, ' ');
            let mut splited = words.next().expect("First item will be always.").split('@');
            let command_raw = splited.next().expect("First item will be always.");
            let bot = splited.next();
            let bot_name = bot_name.into();
            match bot {
                Some(name) if name.eq_ignore_ascii_case(&bot_name) => {}
                None => {}
                Some(n) => return Err(ParseError::WrongBotName(n.to_string())),
            }
            let mut args = words.next().unwrap_or("").to_string();
            match command_raw {
                #(
                    #matching_values => Ok(#variants_initialization),
                )*
                _ => Err(ParseError::UnknownCommand(command_raw.to_string())),
            }
        }
    }
}

fn get_enum_data(input: &DeriveInput) -> Result<&syn::DataEnum, TokenStream> {
    match &input.data {
        syn::Data::Enum(data) => Ok(data),
        _ => Err(compile_error("TelegramBotCommand allowed only for enums")),
    }
}

fn parse_attributes(
    input: &[syn::Attribute],
) -> Result<Vec<Attr>, TokenStream> {
    let mut enum_attrs = Vec::new();
    for attr in input.iter() {
        match attr.parse_args::<VecAttrs>() {
            Ok(mut attrs_) => {
                enum_attrs.append(attrs_.data.as_mut());
            }
            Err(e) => {
                return Err(compile_error(e.to_compile_error()));
            }
        }
    }
    Ok(enum_attrs)
}

fn compile_error<T>(data: T) -> TokenStream
where
    T: ToTokens,
{
    TokenStream::from(quote! { compile_error!(#data) })
    bot_commands_impl(input).unwrap_or_else(<_>::into).into()
}
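The `/command@bot_name` handling in the generated `parse` is the same before and after this commit; as standalone logic it amounts to the following std-only sketch (names are illustrative):

```rust
fn split_command(s: &str, bot_name: &str) -> Result<(String, String), String> {
    // Split once: the command token, then everything else as the argument string.
    let mut words = s.splitn(2, ' ');
    let mut full_command = words.next().unwrap().split('@');
    let command = full_command.next().unwrap();

    // An optional `@username` suffix must match our bot (case-insensitively).
    match full_command.next() {
        None => {}
        Some(username) if username.eq_ignore_ascii_case(bot_name) => {}
        Some(other) => return Err(format!("wrong bot name: {other}")),
    }

    let args = words.next().unwrap_or("").to_owned();
    Ok((command.to_owned(), args))
}

fn main() {
    assert_eq!(
        split_command("/help@My_Bot rest of text", "my_bot").unwrap(),
        ("/help".to_owned(), "rest of text".to_owned())
    );
    assert!(split_command("/help@other_bot", "my_bot").is_err());
}
```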
@@ -1,27 +1,79 @@
// Some concepts are from Serde.

use crate::error::{compile_error, Result};

use heck::{
    ToKebabCase, ToLowerCamelCase, ToPascalCase, ToShoutyKebabCase,
    ToShoutySnakeCase, ToSnakeCase,
};

/// Apply a renaming rule to an enum variant,
/// returning the version expected in the source.
///
/// The possible `rule` can be: `lowercase`, `UPPERCASE`, `PascalCase`,
/// `camelCase`, `snake_case`, `SCREAMING_SNAKE_CASE`, `kebab-case`,
/// `SCREAMING-KEBAB-CASE`. See tests for the details how it will work.
pub fn rename_by_rule(input: &str, rule: &str) -> String {
    match rule {
        "lowercase" => input.to_lowercase(),
        "UPPERCASE" => input.to_uppercase(),
        "PascalCase" => input.to_pascal_case(),
        "camelCase" => input.to_lower_camel_case(),
        "snake_case" => input.to_snake_case(),
        "SCREAMING_SNAKE_CASE" => input.to_shouty_snake_case(),
        "kebab-case" => input.to_kebab_case(),
        "SCREAMING-KEBAB-CASE" => input.to_shouty_kebab_case(),
        _ => rule.to_string(),
#[derive(Copy, Clone, Debug)]
pub(crate) enum RenameRule {
    /// -> `lowercase`
    LowerCase,
    /// -> `UPPERCASE`
    UpperCase,
    /// -> `PascalCase`
    PascalCase,
    /// -> `camelCase`
    CamelCase,
    /// -> `snake_case`
    SnakeCase,
    /// -> `SCREAMING_SNAKE_CASE`
    ScreamingSnakeCase,
    /// -> `kebab-case`
    KebabCase,
    /// -> `SCREAMING-KEBAB-CASE`
    ScreamingKebabCase,
    /// Leaves input as-is
    Identity,
}

impl RenameRule {
    /// Apply a renaming rule to a string, returning the version expected in the
    /// source.
    ///
    /// See tests for the details how it will work.
    pub fn apply(self, input: &str) -> String {
        use RenameRule::*;

        match self {
            LowerCase => input.to_lowercase(),
            UpperCase => input.to_uppercase(),
            PascalCase => input.to_pascal_case(),
            CamelCase => input.to_lower_camel_case(),
            SnakeCase => input.to_snake_case(),
            ScreamingSnakeCase => input.to_shouty_snake_case(),
            KebabCase => input.to_kebab_case(),
            ScreamingKebabCase => input.to_shouty_kebab_case(),
            Identity => input.to_owned(),
        }
    }

    pub fn parse(rule: &str) -> Result<Self> {
        use RenameRule::*;

        let rule = match rule {
            "lowercase" => LowerCase,
            "UPPERCASE" => UpperCase,
            "PascalCase" => PascalCase,
            "camelCase" => CamelCase,
            "snake_case" => SnakeCase,
            "SCREAMING_SNAKE_CASE" => ScreamingSnakeCase,
            "kebab-case" => KebabCase,
            "SCREAMING-KEBAB-CASE" => ScreamingKebabCase,
            "identity" => Identity,
            invalid => {
                return Err(compile_error(format!(
                    "invalid rename rule `{invalid}` (supported rules: \
                     `lowercase`, `UPPERCASE`, `PascalCase`, `camelCase`, \
                     `snake_case`, `SCREAMING_SNAKE_CASE`, `kebab-case`, \
                     `SCREAMING-KEBAB-CASE` and `identity`)"
                )))
            }
        };

        Ok(rule)
    }
}

@@ -30,8 +82,10 @@ mod tests {
    use super::*;

    macro_rules! test_eq {
        ($lval:expr, $rval:expr) => {
            assert_eq!(rename_by_rule($lval, TYPE), $rval);
        ($input:expr => $output:expr) => {
            let rule = RenameRule::parse(TYPE).unwrap();

            assert_eq!(rule.apply($input), $output);
        };
    }

@@ -39,79 +93,79 @@ mod tests {
    fn test_lowercase() {
        const TYPE: &str = "lowercase";

        test_eq!("HelloWorld", "helloworld");
        test_eq!("Hello_World", "hello_world");
        test_eq!("Hello-World", "hello-world");
        test_eq!("helloWorld", "helloworld");
        test_eq!("HelloWorld" => "helloworld");
        test_eq!("Hello_World" => "hello_world");
        test_eq!("Hello-World" => "hello-world");
        test_eq!("helloWorld" => "helloworld");
    }

    #[test]
    fn test_uppercase() {
        const TYPE: &str = "UPPERCASE";

        test_eq!("HelloWorld", "HELLOWORLD");
        test_eq!("Hello_World", "HELLO_WORLD");
        test_eq!("Hello-World", "HELLO-WORLD");
        test_eq!("helloWorld", "HELLOWORLD");
        test_eq!("HelloWorld" => "HELLOWORLD");
        test_eq!("Hello_World" => "HELLO_WORLD");
        test_eq!("Hello-World" => "HELLO-WORLD");
        test_eq!("helloWorld" => "HELLOWORLD");
    }

    #[test]
    fn test_pascalcase() {
        const TYPE: &str = "PascalCase";

        test_eq!("HelloWorld", "HelloWorld");
        test_eq!("Hello_World", "HelloWorld");
        test_eq!("Hello-World", "HelloWorld");
        test_eq!("helloWorld", "HelloWorld");
        test_eq!("HelloWorld" => "HelloWorld");
        test_eq!("Hello_World" => "HelloWorld");
        test_eq!("Hello-World" => "HelloWorld");
        test_eq!("helloWorld" => "HelloWorld");
    }

    #[test]
    fn test_camelcase() {
        const TYPE: &str = "camelCase";

        test_eq!("HelloWorld", "helloWorld");
        test_eq!("Hello_World", "helloWorld");
        test_eq!("Hello-World", "helloWorld");
        test_eq!("helloWorld", "helloWorld");
        test_eq!("HelloWorld" => "helloWorld");
        test_eq!("Hello_World" => "helloWorld");
        test_eq!("Hello-World" => "helloWorld");
        test_eq!("helloWorld" => "helloWorld");
    }

    #[test]
    fn test_snakecase() {
        const TYPE: &str = "snake_case";

        test_eq!("HelloWorld", "hello_world");
        test_eq!("Hello_World", "hello_world");
        test_eq!("Hello-World", "hello_world");
        test_eq!("helloWorld", "hello_world");
        test_eq!("HelloWorld" => "hello_world");
        test_eq!("Hello_World" => "hello_world");
        test_eq!("Hello-World" => "hello_world");
        test_eq!("helloWorld" => "hello_world");
    }

    #[test]
    fn test_screaming_snakecase() {
        const TYPE: &str = "SCREAMING_SNAKE_CASE";

        test_eq!("HelloWorld", "HELLO_WORLD");
        test_eq!("Hello_World", "HELLO_WORLD");
        test_eq!("Hello-World", "HELLO_WORLD");
        test_eq!("helloWorld", "HELLO_WORLD");
        test_eq!("HelloWorld" => "HELLO_WORLD");
        test_eq!("Hello_World" => "HELLO_WORLD");
        test_eq!("Hello-World" => "HELLO_WORLD");
        test_eq!("helloWorld" => "HELLO_WORLD");
    }

    #[test]
    fn test_kebabcase() {
        const TYPE: &str = "kebab-case";

        test_eq!("HelloWorld", "hello-world");
        test_eq!("Hello_World", "hello-world");
        test_eq!("Hello-World", "hello-world");
        test_eq!("helloWorld", "hello-world");
        test_eq!("HelloWorld" => "hello-world");
        test_eq!("Hello_World" => "hello-world");
        test_eq!("Hello-World" => "hello-world");
        test_eq!("helloWorld" => "hello-world");
    }

    #[test]
    fn test_screaming_kebabcase() {
        const TYPE: &str = "SCREAMING-KEBAB-CASE";

        test_eq!("HelloWorld", "HELLO-WORLD");
        test_eq!("Hello_World", "HELLO-WORLD");
        test_eq!("Hello-World", "HELLO-WORLD");
        test_eq!("helloWorld", "HELLO-WORLD");
        test_eq!("HelloWorld" => "HELLO-WORLD");
        test_eq!("Hello_World" => "HELLO-WORLD");
        test_eq!("Hello-World" => "HELLO-WORLD");
        test_eq!("helloWorld" => "HELLO-WORLD");
    }
}
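The rule-to-`heck` mapping itself is unchanged by the refactoring; a standalone check using the same heck 0.4-style traits imported above:

```rust
use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase};

fn main() {
    // The same conversions RenameRule::apply dispatches to.
    assert_eq!("HelloWorld".to_snake_case(), "hello_world");
    assert_eq!("HelloWorld".to_shouty_snake_case(), "HELLO_WORLD");
    assert_eq!("HelloWorld".to_kebab_case(), "hello-world");
    assert_eq!("HelloWorld".to_lower_camel_case(), "helloWorld");
    // `Identity` and the plain lowercase/UPPERCASE rules use std methods.
    assert_eq!("HelloWorld".to_lowercase(), "helloworld");
}
```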
20 src/unzip.rs (new file)
@@ -0,0 +1,20 @@
use std::iter::FromIterator;

pub(crate) struct Unzip<A, B>(pub A, pub B);

impl<A, B, T, U> FromIterator<(T, U)> for Unzip<A, B>
where
    A: Default + Extend<T>,
    B: Default + Extend<U>,
{
    fn from_iter<I: IntoIterator<Item = (T, U)>>(iter: I) -> Self {
        let (mut a, mut b): (A, B) = Default::default();

        for (t, u) in iter {
            a.extend([t]);
            b.extend([u]);
        }

        Unzip(a, b)
    }
}
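A standalone usage sketch (the struct is redeclared here because the crate's version is `pub(crate)`): `Unzip` lets a single fallible `collect` produce two collections at once, which is how `bot_commands_impl` splits each variant into its parse arm and its `Command` metadata.

```rust
use std::iter::FromIterator;

struct Unzip<A, B>(pub A, pub B);

impl<A, B, T, U> FromIterator<(T, U)> for Unzip<A, B>
where
    A: Default + Extend<T>,
    B: Default + Extend<U>,
{
    fn from_iter<I: IntoIterator<Item = (T, U)>>(iter: I) -> Self {
        let (mut a, mut b): (A, B) = Default::default();
        for (t, u) in iter {
            a.extend([t]);
            b.extend([u]);
        }
        Unzip(a, b)
    }
}

fn main() {
    // Collecting through `Result` keeps the early-return-on-error behaviour.
    let items = vec![Ok((1, "one")), Ok((2, "two"))];
    let Unzip(nums, names): Unzip<Vec<_>, Vec<_>> =
        items.into_iter().collect::<Result<_, ()>>().unwrap();
    assert_eq!(nums, vec![1, 2]);
    assert_eq!(names, vec!["one", "two"]);
}
```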