Merge branch 'master' into pragma

gwenn, 2019-02-16 18:52:03 +01:00 (committed by GitHub)
commit 1775688ac2
18 changed files with 206 additions and 112 deletions


@ -11,7 +11,7 @@ an interface similar to [rust-postgres](https://github.com/sfackler/rust-postgre
```rust
use rusqlite::types::ToSql;
use rusqlite::{Connection, NO_PARAMS};
use rusqlite::{Connection, Result, NO_PARAMS};
use time::Timespec;
#[derive(Debug)]
@ -22,8 +22,8 @@ struct Person {
data: Option<Vec<u8>>,
}
fn main() {
let conn = Connection::open_in_memory().unwrap();
fn main() -> Result<()> {
let conn = Connection::open_in_memory()?;
conn.execute(
"CREATE TABLE person (
@ -33,7 +33,7 @@ fn main() {
data BLOB
)",
NO_PARAMS,
).unwrap();
)?;
let me = Person {
id: 0,
name: "Steven".to_string(),
@ -44,22 +44,22 @@ fn main() {
"INSERT INTO person (name, time_created, data)
VALUES (?1, ?2, ?3)",
&[&me.name as &ToSql, &me.time_created, &me.data],
).unwrap();
)?;
let mut stmt = conn
.prepare("SELECT id, name, time_created, data FROM person")
.unwrap();
.prepare("SELECT id, name, time_created, data FROM person")?;
let person_iter = stmt
.query_map(NO_PARAMS, |row| Person {
id: row.get(0),
name: row.get(1),
time_created: row.get(2),
data: row.get(3),
}).unwrap();
})?;
for person in person_iter {
println!("Found person {:?}", person.unwrap());
}
Ok(())
}
```


@ -35,8 +35,8 @@ test_script:
- cargo test --lib --verbose --features bundled
- cargo test --lib --features "backup blob chrono functions hooks limits load_extension serde_json trace"
- cargo test --lib --features "backup blob chrono functions hooks limits load_extension serde_json trace buildtime_bindgen"
- cargo test --lib --features "backup blob chrono csvtab functions hooks limits load_extension serde_json trace vtab_v3 bundled"
- cargo test --lib --features "backup blob chrono csvtab functions hooks limits load_extension serde_json trace vtab_v3 bundled buildtime_bindgen"
- cargo test --lib --features "backup blob chrono csvtab functions hooks limits load_extension serde_json trace vtab bundled"
- cargo test --lib --features "backup blob chrono csvtab functions hooks limits load_extension serde_json trace vtab bundled buildtime_bindgen"
cache:
- C:\Users\appveyor\.cargo


@ -134,8 +134,14 @@ mod build {
}
// Allow users to specify where to find SQLite.
if let Ok(dir) = env::var(format!("{}_LIB_DIR", env_prefix())) {
println!("cargo:rustc-link-lib={}={}", find_link_mode(), link_lib);
println!("cargo:rustc-link-search={}", dir);
// Try to use pkg-config to determine link commands
let pkgconfig_path = Path::new(&dir).join("pkgconfig");
env::set_var("PKG_CONFIG_PATH", pkgconfig_path);
if let Err(_) = pkg_config::Config::new().probe(link_lib) {
// Otherwise just emit the bare minimum link commands.
println!("cargo:rustc-link-lib={}={}", find_link_mode(), link_lib);
println!("cargo:rustc-link-search={}", dir);
}
return HeaderLocation::FromEnvironment;
}
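
The hunk above makes the `*_LIB_DIR` override consult pkg-config before falling back to bare link directives. A condensed, standalone sketch of that behaviour (the `SQLITE3_LIB_DIR` variable and the `sqlite3` probe target are assumptions standing in for `env_prefix()` and `link_lib`; the `pkg-config` crate must be a build-dependency):

```rust
// build.rs sketch: prefer pkg-config metadata when the user points the build
// at a custom SQLite, otherwise emit plain rustc-link directives.
use std::env;
use std::path::Path;

fn main() {
    if let Ok(dir) = env::var("SQLITE3_LIB_DIR") {
        // Look for <dir>/pkgconfig/sqlite3.pc first.
        env::set_var("PKG_CONFIG_PATH", Path::new(&dir).join("pkgconfig"));
        if pkg_config::Config::new().probe("sqlite3").is_err() {
            // No .pc file found: emit the bare minimum link commands.
            println!("cargo:rustc-link-lib=dylib=sqlite3");
            println!("cargo:rustc-link-search={}", dir);
        }
    }
}
```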


@ -167,7 +167,7 @@ pub struct Backup<'a, 'b> {
b: *mut ffi::sqlite3_backup,
}
impl<'a, 'b> Backup<'a, 'b> {
impl Backup<'_, '_> {
/// Attempt to create a new handle that will allow backups from `from` to
/// `to`. Note that `to` is a `&mut` - this is because SQLite forbids any
/// API calls on the destination of a backup while the backup is taking
@ -177,7 +177,7 @@ impl<'a, 'b> Backup<'a, 'b> {
///
/// Will return `Err` if the underlying `sqlite3_backup_init` call returns
/// `NULL`.
pub fn new(from: &'a Connection, to: &'b mut Connection) -> Result<Backup<'a, 'b>> {
pub fn new<'a, 'b>(from: &'a Connection, to: &'b mut Connection) -> Result<Backup<'a, 'b>> {
Backup::new_with_names(from, DatabaseName::Main, to, DatabaseName::Main)
}
@ -190,7 +190,7 @@ impl<'a, 'b> Backup<'a, 'b> {
///
/// Will return `Err` if the underlying `sqlite3_backup_init` call returns
/// `NULL`.
pub fn new_with_names(
pub fn new_with_names<'a, 'b>(
from: &'a Connection,
from_name: DatabaseName<'_>,
to: &'b mut Connection,
@ -294,7 +294,7 @@ impl<'a, 'b> Backup<'a, 'b> {
}
}
impl<'a, 'b> Drop for Backup<'a, 'b> {
impl Drop for Backup<'_, '_> {
fn drop(&mut self) {
unsafe { ffi::sqlite3_backup_finish(self.b) };
}
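
The `Backup` API itself is unchanged by these lifetime rewrites. For reference, a minimal sketch of driving a backup to completion (assumes the `backup` cargo feature is enabled; the pacing values passed to `run_to_completion` are arbitrary):

```rust
use rusqlite::backup::Backup;
use rusqlite::{Connection, Result};
use std::time::Duration;

/// Copy the contents of `src` into a new database file at `dst_path`.
fn copy_db(src: &Connection, dst_path: &str) -> Result<()> {
    let mut dst = Connection::open(dst_path)?;
    let backup = Backup::new(src, &mut dst)?;
    // Copy 5 pages per step, pause 250 ms between steps, no progress callback.
    backup.run_to_completion(5, Duration::from_millis(250), None)
}
```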


@ -16,43 +16,41 @@
//! ```rust
//! use rusqlite::blob::ZeroBlob;
//! use rusqlite::{Connection, DatabaseName, NO_PARAMS};
//! use std::error::Error;
//! use std::io::{Read, Seek, SeekFrom, Write};
//!
//! fn main() {
//! let db = Connection::open_in_memory().unwrap();
//! db.execute_batch("CREATE TABLE test (content BLOB);")
//! .unwrap();
//! fn main() -> Result<(), Box<Error>> {
//! let db = Connection::open_in_memory()?;
//! db.execute_batch("CREATE TABLE test (content BLOB);")?;
//! db.execute(
//! "INSERT INTO test (content) VALUES (ZEROBLOB(10))",
//! NO_PARAMS,
//! )
//! .unwrap();
//! )?;
//!
//! let rowid = db.last_insert_rowid();
//! let mut blob = db
//! .blob_open(DatabaseName::Main, "test", "content", rowid, false)
//! .unwrap();
//! .blob_open(DatabaseName::Main, "test", "content", rowid, false)?;
//!
//! // Make sure to test that the number of bytes written matches what you expect;
//! // if you try to write too much, the data will be truncated to the size of the
//! // BLOB.
//! let bytes_written = blob.write(b"01234567").unwrap();
//! let bytes_written = blob.write(b"01234567")?;
//! assert_eq!(bytes_written, 8);
//!
//! // Same guidance - make sure you check the number of bytes read!
//! blob.seek(SeekFrom::Start(0)).unwrap();
//! blob.seek(SeekFrom::Start(0))?;
//! let mut buf = [0u8; 20];
//! let bytes_read = blob.read(&mut buf[..]).unwrap();
//! let bytes_read = blob.read(&mut buf[..])?;
//! assert_eq!(bytes_read, 10); // note we read 10 bytes because the blob has size 10
//!
//! db.execute("INSERT INTO test (content) VALUES (?)", &[ZeroBlob(64)])
//! .unwrap();
//! db.execute("INSERT INTO test (content) VALUES (?)", &[ZeroBlob(64)])?;
//!
//! // given a new row ID, we can reopen the blob on that row
//! let rowid = db.last_insert_rowid();
//! blob.reopen(rowid).unwrap();
//! blob.reopen(rowid)?;
//!
//! assert_eq!(blob.size(), 64);
//! Ok(())
//! }
//! ```
use std::cmp::min;
@ -111,7 +109,7 @@ impl Connection {
}
}
impl<'conn> Blob<'conn> {
impl Blob<'_> {
/// Move a BLOB handle to a new row.
///
/// # Failure
@ -151,7 +149,7 @@ impl<'conn> Blob<'conn> {
}
}
impl<'conn> io::Read for Blob<'conn> {
impl io::Read for Blob<'_> {
/// Read data from a BLOB incrementally. Will return Ok(0) if the end of
/// the blob has been reached.
///
@ -175,7 +173,7 @@ impl<'conn> io::Read for Blob<'conn> {
}
}
impl<'conn> io::Write for Blob<'conn> {
impl io::Write for Blob<'_> {
/// Write data into a BLOB incrementally. Will return `Ok(0)` if the end of
/// the blob has been reached; consider using `Write::write_all(buf)`
/// if you want to get an error if the entirety of the buffer cannot be
@ -208,7 +206,7 @@ impl<'conn> io::Write for Blob<'conn> {
}
}
impl<'conn> io::Seek for Blob<'conn> {
impl io::Seek for Blob<'_> {
/// Seek to an offset, in bytes, in BLOB.
fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {
let pos = match pos {
@ -235,7 +233,7 @@ impl<'conn> io::Seek for Blob<'conn> {
}
#[allow(unused_must_use)]
impl<'conn> Drop for Blob<'conn> {
impl Drop for Blob<'_> {
fn drop(&mut self) {
self.close_();
}
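
As in the previous sections, the changes here swap named-but-unused lifetimes (`impl<'conn> ... for Blob<'conn>`) for the Rust 2018 anonymous lifetime `'_` in impl headers. A tiny illustration with a made-up `Guard` type:

```rust
// A made-up guard type, only to show the two equivalent spellings.
struct Guard<'conn>(&'conn str);

// Before: the lifetime is named even though the body never mentions it.
//   impl<'conn> Drop for Guard<'conn> { ... }
// After (anonymous lifetime), as adopted throughout this commit:
impl Drop for Guard<'_> {
    fn drop(&mut self) {
        println!("releasing guard over {}", self.0);
    }
}

fn main() {
    let _guard = Guard("connection");
}
```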


@ -79,7 +79,7 @@ impl<'conn> DerefMut for CachedStatement<'conn> {
}
}
impl<'conn> Drop for CachedStatement<'conn> {
impl Drop for CachedStatement<'_> {
#[allow(unused_must_use)]
fn drop(&mut self) {
if let Some(stmt) = self.stmt.take() {
@ -88,8 +88,8 @@ impl<'conn> Drop for CachedStatement<'conn> {
}
}
impl<'conn> CachedStatement<'conn> {
fn new(stmt: Statement<'conn>, cache: &'conn StatementCache) -> CachedStatement<'conn> {
impl CachedStatement<'_> {
fn new<'conn>(stmt: Statement<'conn>, cache: &'conn StatementCache) -> CachedStatement<'conn> {
CachedStatement {
stmt: Some(stmt),
cache,

src/config.rs (new file, 88 additions)

@ -0,0 +1,88 @@
//! Configure database connections
use std::os::raw::c_int;
use crate::ffi;
use crate::{Connection, Result};
/// Database Connection Configuration Options
#[repr(i32)]
#[allow(non_snake_case, non_camel_case_types)]
pub enum DbConfig {
//SQLITE_DBCONFIG_MAINDBNAME = 1000, /* const char* */
//SQLITE_DBCONFIG_LOOKASIDE = 1001, /* void* int int */
SQLITE_DBCONFIG_ENABLE_FKEY = 1002,
SQLITE_DBCONFIG_ENABLE_TRIGGER = 1003,
SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER = 1004, // 3.12.0
//SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION = 1005,
SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE = 1006,
SQLITE_DBCONFIG_ENABLE_QPSG = 1007, // 3.20.0
SQLITE_DBCONFIG_TRIGGER_EQP = 1008,
//SQLITE_DBCONFIG_RESET_DATABASE = 1009,
SQLITE_DBCONFIG_DEFENSIVE = 1010,
}
impl Connection {
/// Returns the current value of a `config`.
///
/// - SQLITE_DBCONFIG_ENABLE_FKEY: return `false` or `true` to indicate whether FK enforcement is off or on
/// - SQLITE_DBCONFIG_ENABLE_TRIGGER: return `false` or `true` to indicate whether triggers are disabled or enabled
/// - SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: return `false` or `true` to indicate whether fts3_tokenizer are disabled or enabled
/// - SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: return `false` to indicate checkpoints-on-close are not disabled or `true` if they are
/// - SQLITE_DBCONFIG_ENABLE_QPSG: return `false` or `true` to indicate whether the QPSG is disabled or enabled
/// - SQLITE_DBCONFIG_TRIGGER_EQP: return `false` to indicate output-for-trigger are not disabled or `true` if it is
pub fn db_config(&self, config: DbConfig) -> Result<bool> {
let c = self.db.borrow();
unsafe {
let mut val = 0;
check!(ffi::sqlite3_db_config(
c.db(),
config as c_int,
-1,
&mut val
));
Ok(val != 0)
}
}
/// Make configuration changes to a database connection
///
/// - SQLITE_DBCONFIG_ENABLE_FKEY: `false` to disable FK enforcement, `true` to enable FK enforcement
/// - SQLITE_DBCONFIG_ENABLE_TRIGGER: `false` to disable triggers, `true` to enable triggers
/// - SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER: `false` to disable fts3_tokenizer(), `true` to enable fts3_tokenizer()
/// - SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE: `false` (the default) to enable checkpoints-on-close, `true` to disable them
/// - SQLITE_DBCONFIG_ENABLE_QPSG: `false` to disable the QPSG, `true` to enable QPSG
/// - SQLITE_DBCONFIG_TRIGGER_EQP: `false` to disable output for trigger programs, `true` to enable it
pub fn set_db_config(&self, config: DbConfig, new_val: bool) -> Result<bool> {
let c = self.db.borrow_mut();
unsafe {
let mut val = 0;
check!(ffi::sqlite3_db_config(
c.db(),
config as c_int,
if new_val { 1 } else { 0 },
&mut val
));
Ok(val != 0)
}
}
}
#[cfg(test)]
mod test {
use super::DbConfig;
use crate::Connection;
#[test]
fn test_db_config() {
let db = Connection::open_in_memory().unwrap();
let opposite = !db.db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY).unwrap();
assert_eq!(db.set_db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY, opposite), Ok(opposite));
assert_eq!(db.db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY), Ok(opposite));
let opposite = !db.db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_TRIGGER).unwrap();
assert_eq!(db.set_db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_TRIGGER, opposite), Ok(opposite));
assert_eq!(db.db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_TRIGGER), Ok(opposite));
}
}
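
Beyond the test above, a minimal sketch of how the new `config` module is meant to be used (the helper name is made up; the calls mirror the API added in this file):

```rust
use rusqlite::config::DbConfig;
use rusqlite::{Connection, Result};

/// Turn foreign-key enforcement on and confirm the setting took effect.
fn enable_foreign_keys(conn: &Connection) -> Result<()> {
    conn.set_db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY, true)?;
    assert!(conn.db_config(DbConfig::SQLITE_DBCONFIG_ENABLE_FKEY)?);
    Ok(())
}
```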


@ -34,19 +34,19 @@
//! })
//! }
//!
//! fn main() {
//! let db = Connection::open_in_memory().unwrap();
//! add_regexp_function(&db).unwrap();
//! fn main() -> Result<()> {
//! let db = Connection::open_in_memory()?;
//! add_regexp_function(&db)?;
//!
//! let is_match: bool = db
//! .query_row(
//! "SELECT regexp('[aeiou]*', 'aaaaeeeiii')",
//! NO_PARAMS,
//! |row| row.get(0),
//! )
//! .unwrap();
//! )?;
//!
//! assert!(is_match);
//! Ok(())
//! }
//! ```
use std::error::Error as StdError;
@ -104,7 +104,7 @@ pub struct Context<'a> {
args: &'a [*mut sqlite3_value],
}
impl<'a> Context<'a> {
impl Context<'_> {
/// Returns the number of arguments to the function.
pub fn len(&self) -> usize {
self.args.len()
@ -146,7 +146,7 @@ impl<'a> Context<'a> {
/// # Failure
///
/// Will panic if `idx` is greater than or equal to `self.len()`.
pub fn get_raw(&self, idx: usize) -> ValueRef<'a> {
pub fn get_raw(&self, idx: usize) -> ValueRef<'_> {
let arg = self.args[idx];
unsafe { ValueRef::from_value(arg) }
}


@ -5,7 +5,7 @@ use std::os::raw::c_int;
use std::path::Path;
use std::ptr;
use std::str;
use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex, Once, ONCE_INIT};
use super::ffi;
@ -292,7 +292,7 @@ impl Drop for InnerConnection {
#[cfg(not(feature = "bundled"))]
static SQLITE_VERSION_CHECK: Once = ONCE_INIT;
#[cfg(not(feature = "bundled"))]
pub static BYPASS_VERSION_CHECK: AtomicBool = ATOMIC_BOOL_INIT;
pub static BYPASS_VERSION_CHECK: AtomicBool = AtomicBool::new(false);
#[cfg(not(feature = "bundled"))]
fn ensure_valid_sqlite_version() {
@ -339,7 +339,7 @@ rusqlite was built against SQLite {} but the runtime SQLite version is {}. To fi
}
static SQLITE_INIT: Once = ONCE_INIT;
pub static BYPASS_SQLITE_INIT: AtomicBool = ATOMIC_BOOL_INIT;
pub static BYPASS_SQLITE_INIT: AtomicBool = AtomicBool::new(false);
fn ensure_safe_sqlite_threading_mode() -> Result<()> {
// Ensure SQLite was compiled in thredsafe mode.


@ -3,7 +3,7 @@
//!
//! ```rust
//! use rusqlite::types::ToSql;
//! use rusqlite::{params, Connection};
//! use rusqlite::{params, Connection, Result};
//! use time::Timespec;
//!
//! #[derive(Debug)]
@ -14,8 +14,8 @@
//! data: Option<Vec<u8>>,
//! }
//!
//! fn main() {
//! let conn = Connection::open_in_memory().unwrap();
//! fn main() -> Result<()> {
//! let conn = Connection::open_in_memory()?;
//!
//! conn.execute(
//! "CREATE TABLE person (
@ -25,8 +25,7 @@
//! data BLOB
//! )",
//! params![],
//! )
//! .unwrap();
//! )?;
//! let me = Person {
//! id: 0,
//! name: "Steven".to_string(),
@ -37,24 +36,22 @@
//! "INSERT INTO person (name, time_created, data)
//! VALUES (?1, ?2, ?3)",
//! params![me.name, me.time_created, me.data],
//! )
//! .unwrap();
//! )?;
//!
//! let mut stmt = conn
//! .prepare("SELECT id, name, time_created, data FROM person")
//! .unwrap();
//! .prepare("SELECT id, name, time_created, data FROM person")?;
//! let person_iter = stmt
//! .query_map(params![], |row| Person {
//! id: row.get(0),
//! name: row.get(1),
//! time_created: row.get(2),
//! data: row.get(3),
//! })
//! .unwrap();
//! })?;
//!
//! for person in person_iter {
//! println!("Found person {:?}", person.unwrap());
//! }
//! Ok(())
//! }
//! ```
#![allow(unknown_lints)]
@ -98,16 +95,18 @@ pub use crate::transaction::{DropBehavior, Savepoint, Transaction, TransactionBe
pub use crate::types::ToSql;
pub use crate::version::*;
#[macro_use]
mod error;
#[cfg(feature = "backup")]
pub mod backup;
#[cfg(feature = "blob")]
pub mod blob;
mod busy;
mod cache;
pub mod config;
#[cfg(any(feature = "functions", feature = "vtab"))]
mod context;
#[macro_use]
mod error;
#[cfg(feature = "functions")]
pub mod functions;
#[cfg(feature = "hooks")]
@ -265,7 +264,7 @@ pub enum DatabaseName<'a> {
feature = "session",
feature = "bundled"
))]
impl<'a> DatabaseName<'a> {
impl DatabaseName<'_> {
fn to_cstring(&self) -> Result<CString> {
use self::DatabaseName::{Attached, Main, Temp};
match *self {
@ -689,7 +688,7 @@ impl Connection {
/// Return the number of rows modified, inserted or deleted by the most
/// recently completed INSERT, UPDATE or DELETE statement on the database
/// connection.
pub fn changes(&self) -> usize {
fn changes(&self) -> usize {
self.db.borrow_mut().changes()
}
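
The hunk above drops `pub` from `Connection::changes`. Per-statement row counts remain available directly from `execute`, which returns them as `Result<usize>`; a short sketch (the `person` table is hypothetical):

```rust
use rusqlite::{Connection, Result, NO_PARAMS};

/// Delete every row and report how many were removed.
fn clear_person(conn: &Connection) -> Result<usize> {
    // `execute` returns the number of rows changed by the statement.
    conn.execute("DELETE FROM person", NO_PARAMS)
}
```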


@ -17,7 +17,7 @@ pub struct LoadExtensionGuard<'conn> {
conn: &'conn Connection,
}
impl<'conn> LoadExtensionGuard<'conn> {
impl LoadExtensionGuard<'_> {
/// Attempt to enable loading extensions. Loading extensions will be
/// disabled when this guard goes out of scope. Cannot be meaningfully
/// nested.
@ -28,7 +28,7 @@ impl<'conn> LoadExtensionGuard<'conn> {
}
#[allow(unused_must_use)]
impl<'conn> Drop for LoadExtensionGuard<'conn> {
impl Drop for LoadExtensionGuard<'_> {
fn drop(&mut self) {
self.conn.load_extension_disable();
}


@ -59,7 +59,7 @@ impl<'stmt> Rows<'stmt> {
}
}
impl<'stmt> Drop for Rows<'stmt> {
impl Drop for Rows<'_> {
fn drop(&mut self) {
self.reset();
}
@ -80,7 +80,7 @@ where
}
}
impl<'conn, T, F> Iterator for MappedRows<'conn, F>
impl<T, F> Iterator for MappedRows<'_, F>
where
F: FnMut(&Row<'_, '_>) -> T,
{
@ -110,7 +110,7 @@ where
}
}
impl<'stmt, T, E, F> Iterator for AndThenRows<'stmt, F>
impl<T, E, F> Iterator for AndThenRows<'_, F>
where
E: convert::From<Error>,
F: FnMut(&Row<'_, '_>) -> result::Result<T, E>,
@ -139,11 +139,11 @@ impl<'a, 'stmt> Row<'a, 'stmt> {
/// Panics if calling `row.get_checked(idx)` would return an error,
/// including:
///
/// * If the underlying SQLite column type is not a valid type as a
/// source for `T`
/// * If the underlying SQLite integral value is outside the range
/// representable by `T`
/// * If `idx` is outside the range of columns in the returned query
/// * If the underlying SQLite column type is not a valid type as a
/// source for `T`
/// * If the underlying SQLite integral value is outside the range
/// representable by `T`
/// * If `idx` is outside the range of columns in the returned query
pub fn get<I: RowIndex, T: FromSql>(&self, idx: I) -> T {
self.get_checked(idx).unwrap()
}
@ -215,8 +215,8 @@ impl<'a, 'stmt> Row<'a, 'stmt> {
/// Panics if calling `row.get_raw_checked(idx)` would return an error,
/// including:
///
/// * If `idx` is outside the range of columns in the returned query.
/// * If `idx` is not a valid column name for this row.
/// * If `idx` is outside the range of columns in the returned query.
/// * If `idx` is not a valid column name for this row.
pub fn get_raw<I: RowIndex>(&self, idx: I) -> ValueRef<'a> {
self.get_raw_checked(idx).unwrap()
}
@ -245,7 +245,7 @@ impl RowIndex for usize {
}
}
impl<'a> RowIndex for &'a str {
impl RowIndex for &'_ str {
#[inline]
fn idx(&self, stmt: &Statement<'_>) -> Result<usize> {
stmt.column_index(*self)


@ -28,14 +28,17 @@ pub struct Session<'conn> {
filter: Option<Box<dyn Fn(&str) -> bool>>,
}
impl<'conn> Session<'conn> {
impl Session<'_> {
/// Create a new session object
pub fn new(db: &'conn Connection) -> Result<Session<'conn>> {
pub fn new<'conn>(db: &'conn Connection) -> Result<Session<'conn>> {
Session::new_with_name(db, DatabaseName::Main)
}
/// Create a new session object
pub fn new_with_name(db: &'conn Connection, name: DatabaseName<'_>) -> Result<Session<'conn>> {
pub fn new_with_name<'conn>(
db: &'conn Connection,
name: DatabaseName<'_>,
) -> Result<Session<'conn>> {
let name = name.to_cstring()?;
let db = db.db.borrow_mut().db;
@ -196,7 +199,7 @@ impl<'conn> Session<'conn> {
}
}
impl<'conn> Drop for Session<'conn> {
impl Drop for Session<'_> {
fn drop(&mut self) {
if self.filter.is_some() {
self.table_filter(None::<fn(&str) -> bool>);
@ -292,7 +295,7 @@ pub struct ChangesetIter<'changeset> {
item: Option<ChangesetItem>,
}
impl<'changeset> ChangesetIter<'changeset> {
impl ChangesetIter<'_> {
/// Create an iterator on `input`
pub fn start_strm<'input>(input: &'input mut dyn Read) -> Result<ChangesetIter<'input>> {
let input_ref = &input;
@ -312,7 +315,7 @@ impl<'changeset> ChangesetIter<'changeset> {
}
}
impl<'changeset> FallibleStreamingIterator for ChangesetIter<'changeset> {
impl FallibleStreamingIterator for ChangesetIter<'_> {
type Error = crate::error::Error;
type Item = ChangesetItem;
@ -343,7 +346,7 @@ pub struct Operation<'item> {
indirect: bool,
}
impl<'item> Operation<'item> {
impl Operation<'_> {
pub fn table_name(&self) -> &str {
self.table_name
}
@ -361,7 +364,7 @@ impl<'item> Operation<'item> {
}
}
impl<'changeset> Drop for ChangesetIter<'changeset> {
impl Drop for ChangesetIter<'_> {
fn drop(&mut self) {
unsafe {
ffi::sqlite3changeset_finalize(self.it);


@ -21,7 +21,7 @@ pub struct Statement<'conn> {
stmt: RawStatement,
}
impl<'conn> Statement<'conn> {
impl Statement<'_> {
/// Get all the column names in the result set of the prepared statement.
pub fn column_names(&self) -> Vec<&str> {
let n = self.column_count();
@ -234,7 +234,7 @@ impl<'conn> Statement<'conn> {
/// # Failure
///
/// Will return `Err` if binding parameters fails.
pub fn query_named<'a>(&'a mut self, params: &[(&str, &dyn ToSql)]) -> Result<Rows<'a>> {
pub fn query_named(&mut self, params: &[(&str, &dyn ToSql)]) -> Result<Rows<'_>> {
self.check_readonly()?;
self.bind_parameters_named(params)?;
Ok(Rows::new(self))
@ -300,11 +300,11 @@ impl<'conn> Statement<'conn> {
/// ## Failure
///
/// Will return `Err` if binding parameters fails.
pub fn query_map_named<'a, T, F>(
&'a mut self,
pub fn query_map_named<T, F>(
&mut self,
params: &[(&str, &dyn ToSql)],
f: F,
) -> Result<MappedRows<'a, F>>
) -> Result<MappedRows<'_, F>>
where
F: FnMut(&Row<'_, '_>) -> T,
{
@ -368,11 +368,11 @@ impl<'conn> Statement<'conn> {
/// ## Failure
///
/// Will return `Err` if binding parameters fails.
pub fn query_and_then_named<'a, T, E, F>(
&'a mut self,
pub fn query_and_then_named<T, E, F>(
&mut self,
params: &[(&str, &dyn ToSql)],
f: F,
) -> Result<AndThenRows<'a, F>>
) -> Result<AndThenRows<'_, F>>
where
E: convert::From<Error>,
F: FnMut(&Row<'_, '_>) -> result::Result<T, E>,
@ -604,7 +604,7 @@ impl<'conn> Statement<'conn> {
}
}
impl<'conn> Into<RawStatement> for Statement<'conn> {
impl Into<RawStatement> for Statement<'_> {
fn into(mut self) -> RawStatement {
let mut stmt = RawStatement::new(ptr::null_mut());
mem::swap(&mut stmt, &mut self.stmt);
@ -612,7 +612,7 @@ impl<'conn> Into<RawStatement> for Statement<'conn> {
}
}
impl<'conn> fmt::Debug for Statement<'conn> {
impl fmt::Debug for Statement<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let sql = str::from_utf8(self.stmt.sql().to_bytes());
f.debug_struct("Statement")
@ -623,14 +623,14 @@ impl<'conn> fmt::Debug for Statement<'conn> {
}
}
impl<'conn> Drop for Statement<'conn> {
impl Drop for Statement<'_> {
#[allow(unused_must_use)]
fn drop(&mut self) {
self.finalize_();
}
}
impl<'conn> Statement<'conn> {
impl Statement<'_> {
pub(crate) fn new(conn: &Connection, stmt: RawStatement) -> Statement<'_> {
Statement { conn, stmt }
}
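
The named-parameter methods touched above (`query_named`, `query_map_named`, `query_and_then_named`) keep the same call shape after the lifetime elision. A hedged usage sketch (the `person` table and `:min_id` parameter are made up):

```rust
use rusqlite::types::ToSql;
use rusqlite::{Connection, Result};

/// Fetch the names of rows whose id exceeds `min_id`.
fn names_above(conn: &Connection, min_id: i64) -> Result<Vec<String>> {
    let mut stmt = conn.prepare("SELECT name FROM person WHERE id > :min_id")?;
    let rows = stmt.query_map_named(&[(":min_id", &min_id as &dyn ToSql)], |row| row.get(0))?;
    // Each item is a Result<String>; collecting propagates the first error.
    rows.collect()
}
```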


@ -87,7 +87,7 @@ pub struct Savepoint<'conn> {
committed: bool,
}
impl<'conn> Transaction<'conn> {
impl Transaction<'_> {
/// Begin a new transaction. Cannot be nested; see `savepoint` for nested
/// transactions.
/// Even though we don't mutate the connection, we take a `&mut Connection`
@ -195,7 +195,7 @@ impl<'conn> Transaction<'conn> {
}
}
impl<'conn> Deref for Transaction<'conn> {
impl Deref for Transaction<'_> {
type Target = Connection;
fn deref(&self) -> &Connection {
@ -204,13 +204,13 @@ impl<'conn> Deref for Transaction<'conn> {
}
#[allow(unused_must_use)]
impl<'conn> Drop for Transaction<'conn> {
impl Drop for Transaction<'_> {
fn drop(&mut self) {
self.finish_();
}
}
impl<'conn> Savepoint<'conn> {
impl Savepoint<'_> {
fn with_depth_and_name<T: Into<String>>(
conn: &Connection,
depth: u32,
@ -308,7 +308,7 @@ impl<'conn> Savepoint<'conn> {
}
}
impl<'conn> Deref for Savepoint<'conn> {
impl Deref for Savepoint<'_> {
type Target = Connection;
fn deref(&self) -> &Connection {
@ -317,7 +317,7 @@ impl<'conn> Deref for Savepoint<'conn> {
}
#[allow(unused_must_use)]
impl<'conn> Drop for Savepoint<'conn> {
impl Drop for Savepoint<'_> {
fn drop(&mut self) {
self.finish_();
}
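
`Transaction` and `Savepoint` only change their impl headers here; the public API is untouched. For context, a minimal transaction sketch (the `person` table is hypothetical):

```rust
use rusqlite::{Connection, Result, NO_PARAMS};

/// Insert two rows atomically; dropping `tx` without committing rolls back.
fn insert_two(conn: &mut Connection) -> Result<()> {
    let tx = conn.transaction()?;
    tx.execute("INSERT INTO person (name) VALUES ('alice')", NO_PARAMS)?;
    tx.execute("INSERT INTO person (name) VALUES ('bob')", NO_PARAMS)?;
    tx.commit()
}
```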


@ -40,7 +40,7 @@ where
// be converted into Values.
macro_rules! from_value(
($t:ty) => (
impl<'a> From<$t> for ToSqlOutput<'a> {
impl From<$t> for ToSqlOutput<'_> {
fn from(t: $t) -> Self { ToSqlOutput::Owned(t.into())}
}
)
@ -65,7 +65,7 @@ from_value!(Vec<u8>);
#[cfg(feature = "i128_blob")]
from_value!(i128);
impl<'a> ToSql for ToSqlOutput<'a> {
impl ToSql for ToSqlOutput<'_> {
fn to_sql(&self) -> Result<ToSqlOutput<'_>> {
Ok(match *self {
ToSqlOutput::Borrowed(v) => ToSqlOutput::Borrowed(v),
@ -121,7 +121,7 @@ to_sql_self!(f64);
#[cfg(feature = "i128_blob")]
to_sql_self!(i128);
impl<'a, T: ?Sized> ToSql for &'a T
impl<T: ?Sized> ToSql for &'_ T
where
T: ToSql,
{
@ -169,7 +169,7 @@ impl<T: ToSql> ToSql for Option<T> {
}
}
impl<'a> ToSql for Cow<'a, str> {
impl ToSql for Cow<'_, str> {
fn to_sql(&self) -> Result<ToSqlOutput<'_>> {
Ok(ToSqlOutput::from(self.as_ref()))
}


@ -19,7 +19,7 @@ pub enum ValueRef<'a> {
Blob(&'a [u8]),
}
impl<'a> ValueRef<'a> {
impl ValueRef<'_> {
pub fn data_type(&self) -> Type {
match *self {
ValueRef::Null => Type::Null,
@ -69,7 +69,7 @@ impl<'a> ValueRef<'a> {
}
}
impl<'a> From<ValueRef<'a>> for Value {
impl From<ValueRef<'_>> for Value {
fn from(borrowed: ValueRef<'_>) -> Value {
match borrowed {
ValueRef::Null => Value::Null,


@ -337,7 +337,7 @@ impl<'a> Iterator for IndexConstraintIter<'a> {
/// WHERE clause constraint
pub struct IndexConstraint<'a>(&'a ffi::sqlite3_index_constraint);
impl<'a> IndexConstraint<'a> {
impl IndexConstraint<'_> {
/// Column constrained. -1 for ROWID
pub fn column(&self) -> c_int {
self.0.iColumn
@ -357,7 +357,7 @@ impl<'a> IndexConstraint<'a> {
/// Information about what parameters to pass to `VTabCursor.filter`.
pub struct IndexConstraintUsage<'a>(&'a mut ffi::sqlite3_index_constraint_usage);
impl<'a> IndexConstraintUsage<'a> {
impl IndexConstraintUsage<'_> {
/// if `argv_index` > 0, constraint is part of argv to `VTabCursor.filter`
pub fn set_argv_index(&mut self, argv_index: c_int) {
self.0.argvIndex = argv_index;
@ -388,7 +388,7 @@ impl<'a> Iterator for OrderByIter<'a> {
/// A column of the ORDER BY clause.
pub struct OrderBy<'a>(&'a ffi::sqlite3_index_info_sqlite3_index_orderby);
impl<'a> OrderBy<'a> {
impl OrderBy<'_> {
/// Column number
pub fn column(&self) -> c_int {
self.0.iColumn
@ -453,7 +453,7 @@ pub struct Values<'a> {
args: &'a [*mut ffi::sqlite3_value],
}
impl<'a> Values<'a> {
impl Values<'_> {
pub fn len(&self) -> usize {
self.args.len()
}