Compare commits

..

9 Commits

Author SHA1 Message Date
Mostafa
3fc9e5dec1 Merge branch 'main' of github.com:postgresml/pgcat into mostafa_fix_prepared_stmts 2024-09-03 18:11:32 -05:00
Mostafa Abdelraouf
69af6cc5e5 Make iterating on integration tests easier (#789)
Writing and iterating on integration tests is cumbersome, having to wait 10 minutes for the test-suite to run just to see if your test works or not is unacceptable.

In this PR, I added a detailed workflow for writing tests that should shorten the feedback cycle of modifying tests to be as low as a few seconds.

It will involve opening a shell into a long-lived container that has all the setup and dependencies necessary and then running your desired tests directly there. I added a convenience script that bootstraps the environment and then opens an interactive shell into the container and you can then run tests immediately in an environment that is more or less identical to what we have running in CircleCI
2024-09-03 11:15:53 -05:00
Mostafa
f7c5c0faf9 fix bind 2024-09-01 16:14:44 -05:00
Mostafa
982d03c374 fix syntax 2024-09-01 15:41:33 -05:00
Mostafa
686b7ca7c5 Fixes 2024-09-01 15:31:27 -05:00
Mostafa
7c55bf78fe Add failing tests 2024-09-01 14:39:05 -05:00
Mostafa Abdelraouf
ca34597002 Fix broken integration test #740 (#787) 2024-08-31 17:15:13 -05:00
Mostafa Abdelraouf
2def40ea6a Add test case for issue 776 (#786)
I am adding a tiny test that uses the SQL statement that was reported to break an older version of the SQL parser library

#776
2024-08-31 10:52:33 -05:00
Mostafa Abdelraouf
c05129018d Improve Prometheus stats + Add Grafana dashboard (#785)
We were missing some labels on metrics generated by the Prometheus exporter so I fixed that. There are still some gaps that I want to address with respect to the metrics we track but this seems like a good start.

I also created a Grafana Dashboard and exported it to JSON. It is designed with the same metric names the Prometheus exporter uses.
2024-08-31 08:18:57 -05:00
14 changed files with 898 additions and 434 deletions

View File

@@ -59,6 +59,7 @@ admin_password = "admin_pass"
# session: one server connection per connected client
# transaction: one server connection per client transaction
pool_mode = "transaction"
prepared_statements_cache_size = 500
# If the client doesn't specify, route traffic to
# this role by default.
@@ -141,6 +142,7 @@ query_parser_enabled = true
query_parser_read_write_splitting = true
primary_reads_enabled = true
sharding_function = "pg_bigint_hash"
prepared_statements_cache_size = 500
[pools.simple_db.users.0]
username = "simple_user"

View File

@@ -6,6 +6,32 @@ Thank you for contributing! Just a few tips here:
2. Run the test suite (e.g. `pgbench`) to make sure everything still works. The tests are in `.circleci/run_tests.sh`.
3. Performance is important, make sure there are no regressions in your branch vs. `main`.
## How to run the integration tests locally and iterate on them
We have integration tests written in Ruby, Python, Go and Rust.
Below are the steps to run them in a developer-friendly way that allows iterating and quick turnaround.
Hear me out, this should be easy: it will involve opening a shell into a container with all the necessary dependencies available for you, and you can modify the test code and immediately rerun your test in the interactive shell.
Quite simply, make sure you have docker installed and then run
`./start_test_env.sh`
That is it!
Within this test environment you can modify the file in your favorite IDE and rerun the tests without having to bootstrap the entire environment again.
Once the environment is ready, you can run the tests by running
Ruby: `cd /app/tests/ruby && bundle exec ruby <test_name>.rb --format documentation`
Python: `cd /app && python3 tests/python/tests.py`
Rust: `cd /app/tests/rust && cargo run`
Go: `cd /app/tests/go && /usr/local/go/bin/go test`
You can also rebuild PgCat directly within the environment and the tests will run against the newly built binary
To rebuild PgCat, just run `cargo build` within the container under `/app`
![Animated gif showing how to run tests](https://github.com/user-attachments/assets/2258fde3-2aed-4efb-bdc5-e4f12dcd4d33)
Happy hacking!
## TODOs

View File

@@ -1729,14 +1729,13 @@ where
/// and also the pool's statement cache. Add it to extended protocol data.
fn buffer_parse(&mut self, message: BytesMut, pool: &ConnectionPool) -> Result<(), Error> {
// Avoid parsing if prepared statements not enabled
if !self.prepared_statements_enabled {
let client_given_name = Parse::get_name(&message)?;
if !self.prepared_statements_enabled || client_given_name.is_empty() {
debug!("Anonymous parse message");
self.extended_protocol_data_buffer
.push_back(ExtendedProtocolData::create_new_parse(message, None));
return Ok(());
}
let client_given_name = Parse::get_name(&message)?;
let parse: Parse = (&message).try_into()?;
// Compute the hash of the parse statement
@@ -1774,15 +1773,14 @@ where
/// saved in the client cache.
async fn buffer_bind(&mut self, message: BytesMut) -> Result<(), Error> {
// Avoid parsing if prepared statements not enabled
if !self.prepared_statements_enabled {
let client_given_name = Bind::get_name(&message)?;
if !self.prepared_statements_enabled || client_given_name.is_empty() {
debug!("Anonymous bind message");
self.extended_protocol_data_buffer
.push_back(ExtendedProtocolData::create_new_bind(message, None));
return Ok(());
}
let client_given_name = Bind::get_name(&message)?;
match self.prepared_statements.get(&client_given_name) {
Some((rewritten_parse, _)) => {
let message = Bind::rename(message, &rewritten_parse.name)?;
@@ -1834,7 +1832,8 @@ where
}
let describe: Describe = (&message).try_into()?;
if describe.target == 'P' {
let client_given_name = describe.statement_name.clone();
if describe.target == 'P' || client_given_name.is_empty() {
debug!("Portal describe message");
self.extended_protocol_data_buffer
.push_back(ExtendedProtocolData::create_new_describe(message, None));
@@ -1842,8 +1841,6 @@ where
return Ok(());
}
let client_given_name = describe.statement_name.clone();
match self.prepared_statements.get(&client_given_name) {
Some((rewritten_parse, _)) => {
let describe = describe.rename(&rewritten_parse.name);

View File

@@ -821,10 +821,10 @@ impl ExtendedProtocolData {
pub struct Parse {
code: char,
#[allow(dead_code)]
len: i32,
len: u32,
pub name: String,
query: String,
num_params: i16,
num_params: u16,
param_types: Vec<i32>,
}
@@ -834,12 +834,11 @@ impl TryFrom<&BytesMut> for Parse {
fn try_from(buf: &BytesMut) -> Result<Parse, Error> {
let mut cursor = Cursor::new(buf);
let code = cursor.get_u8() as char;
let len = cursor.get_i32();
let len = cursor.get_u32();
let name = cursor.read_string()?;
let query = cursor.read_string()?;
let num_params = cursor.get_i16();
let num_params = cursor.get_u16();
let mut param_types = Vec::new();
for _ in 0..num_params {
param_types.push(cursor.get_i32());
}
@@ -875,10 +874,10 @@ impl TryFrom<Parse> for BytesMut {
+ 4 * parse.num_params as usize;
bytes.put_u8(parse.code as u8);
bytes.put_i32(len as i32);
bytes.put_u32(len as u32);
bytes.put_slice(name);
bytes.put_slice(query);
bytes.put_i16(parse.num_params);
bytes.put_u16(parse.num_params);
for param in parse.param_types {
bytes.put_i32(param);
}
@@ -945,14 +944,14 @@ impl Parse {
pub struct Bind {
code: char,
#[allow(dead_code)]
len: i64,
len: u64,
portal: String,
pub prepared_statement: String,
num_param_format_codes: i16,
num_param_format_codes: u16,
param_format_codes: Vec<i16>,
num_param_values: i16,
num_param_values: u16,
param_values: Vec<(i32, BytesMut)>,
num_result_column_format_codes: i16,
num_result_column_format_codes: u16,
result_columns_format_codes: Vec<i16>,
}
@@ -962,17 +961,17 @@ impl TryFrom<&BytesMut> for Bind {
fn try_from(buf: &BytesMut) -> Result<Bind, Error> {
let mut cursor = Cursor::new(buf);
let code = cursor.get_u8() as char;
let len = cursor.get_i32();
let len = cursor.get_u32();
let portal = cursor.read_string()?;
let prepared_statement = cursor.read_string()?;
let num_param_format_codes = cursor.get_i16();
let num_param_format_codes = cursor.get_u16();
let mut param_format_codes = Vec::new();
for _ in 0..num_param_format_codes {
param_format_codes.push(cursor.get_i16());
}
let num_param_values = cursor.get_i16();
let num_param_values = cursor.get_u16();
let mut param_values = Vec::new();
for _ in 0..num_param_values {
@@ -994,7 +993,7 @@ impl TryFrom<&BytesMut> for Bind {
}
}
let num_result_column_format_codes = cursor.get_i16();
let num_result_column_format_codes = cursor.get_u16();
let mut result_columns_format_codes = Vec::new();
for _ in 0..num_result_column_format_codes {
@@ -1003,7 +1002,7 @@ impl TryFrom<&BytesMut> for Bind {
Ok(Bind {
code,
len: len as i64,
len: len as u64,
portal,
prepared_statement,
num_param_format_codes,
@@ -1042,19 +1041,19 @@ impl TryFrom<Bind> for BytesMut {
len += 2 * bind.num_result_column_format_codes as usize;
bytes.put_u8(bind.code as u8);
bytes.put_i32(len as i32);
bytes.put_u32(len as u32);
bytes.put_slice(portal);
bytes.put_slice(prepared_statement);
bytes.put_i16(bind.num_param_format_codes);
bytes.put_u16(bind.num_param_format_codes);
for param_format_code in bind.param_format_codes {
bytes.put_i16(param_format_code);
}
bytes.put_i16(bind.num_param_values);
bytes.put_u16(bind.num_param_values);
for (param_len, param) in bind.param_values {
bytes.put_i32(param_len);
bytes.put_slice(&param);
}
bytes.put_i16(bind.num_result_column_format_codes);
bytes.put_u16(bind.num_result_column_format_codes);
for result_column_format_code in bind.result_columns_format_codes {
bytes.put_i16(result_column_format_code);
}
@@ -1068,7 +1067,7 @@ impl Bind {
pub fn get_name(buf: &BytesMut) -> Result<String, Error> {
let mut cursor = Cursor::new(buf);
// Skip the code and length
cursor.advance(mem::size_of::<u8>() + mem::size_of::<i32>());
cursor.advance(mem::size_of::<u8>() + mem::size_of::<u32>());
cursor.read_string()?;
cursor.read_string()
}
@@ -1078,17 +1077,17 @@ impl Bind {
let mut cursor = Cursor::new(&buf);
// Read basic data from the cursor
let code = cursor.get_u8();
let current_len = cursor.get_i32();
let current_len = cursor.get_u32();
let portal = cursor.read_string()?;
let prepared_statement = cursor.read_string()?;
// Calculate new length
let new_len = current_len + new_name.len() as i32 - prepared_statement.len() as i32;
let new_len = current_len + new_name.len() as u32 - prepared_statement.len() as u32;
// Begin building the response buffer
let mut response_buf = BytesMut::with_capacity(new_len as usize + 1);
response_buf.put_u8(code);
response_buf.put_i32(new_len);
response_buf.put_u32(new_len);
// Put the portal and new name into the buffer
// Note: panic if the provided string contains null byte
@@ -1112,7 +1111,7 @@ pub struct Describe {
code: char,
#[allow(dead_code)]
len: i32,
len: u32,
pub target: char,
pub statement_name: String,
}
@@ -1123,7 +1122,7 @@ impl TryFrom<&BytesMut> for Describe {
fn try_from(bytes: &BytesMut) -> Result<Describe, Error> {
let mut cursor = Cursor::new(bytes);
let code = cursor.get_u8() as char;
let len = cursor.get_i32();
let len = cursor.get_u32();
let target = cursor.get_u8() as char;
let statement_name = cursor.read_string()?;
@@ -1146,7 +1145,7 @@ impl TryFrom<Describe> for BytesMut {
let len = 4 + 1 + statement_name.len();
bytes.put_u8(describe.code as u8);
bytes.put_i32(len as i32);
bytes.put_u32(len as u32);
bytes.put_u8(describe.target as u8);
bytes.put_slice(statement_name);

View File

@@ -1399,6 +1399,19 @@ mod test {
assert!(!qr.query_parser_enabled());
}
#[test]
fn test_query_parser() {
QueryRouter::setup();
let mut qr = QueryRouter::new();
qr.pool_settings.query_parser_read_write_splitting = true;
let query = simple_query("SELECT req_tab_0.* FROM validation req_tab_0 WHERE array['http://www.w3.org/ns/shacl#ValidationResult'] && req_tab_0.type::text[] AND ( ( (req_tab_0.focusnode = 'DataSource_Credilogic_DataSourceAddress_144959227') ) )");
assert!(qr.infer(&qr.parse(&query).unwrap()).is_ok());
let query = simple_query("WITH EmployeeSalaries AS (SELECT Department, Salary FROM Employees) SELECT Department, AVG(Salary) AS AverageSalary FROM EmployeeSalaries GROUP BY Department;");
assert!(qr.infer(&qr.parse(&query).unwrap()).is_ok());
}
#[test]
fn test_update_from_pool_settings() {
QueryRouter::setup();

View File

@@ -698,7 +698,6 @@ impl Server {
))
}
};
trace!("Error: {}", error_code);
match error_code {
@@ -1013,6 +1012,12 @@ impl Server {
// which can leak between clients. This is a best effort to block bad clients
// from poisoning a transaction-mode pool by setting inappropriate session variables
match command.as_str() {
"DISCARD ALL" => {
self.clear_prepared_statement_cache();
}
"DEALLOCATE ALL" => {
self.clear_prepared_statement_cache();
}
"SET" => {
// We don't detect set statements in transactions
// No great way to differentiate between set and set local
@@ -1132,6 +1137,12 @@ impl Server {
has_it
}
fn clear_prepared_statement_cache(&mut self) {
if let Some(cache) = &mut self.prepared_statement_cache {
cache.clear();
}
}
fn add_prepared_statement_to_cache(&mut self, name: &str) -> Option<String> {
let cache = match &mut self.prepared_statement_cache {
Some(cache) => cache,

34
start_test_env.sh Executable file
View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Bootstraps a long-lived Docker test environment for pgcat integration tests
# and drops the user into an interactive shell inside the "main" container.
# Must be run from the repository root.
GREEN="\033[0;32m"
RED="\033[0;31m"
BLUE="\033[0;34m"
RESET="\033[0m"
# Bail out early if we are not in the repository root; otherwise the
# docker compose commands below would run against the wrong context.
cd tests/docker/ || exit 1
docker compose kill main || true
docker compose build main
docker compose down
docker compose up -d
# wait for the container to start
while ! docker compose exec main ls; do
echo "Waiting for test environment to start"
sleep 1
done
echo "==================================="
# Start toxiproxy detached, build pgcat inside the container, then launch
# the freshly built binary in the background with the CI config.
docker compose exec -e LOG_LEVEL=error -d main toxiproxy-server
docker compose exec --workdir /app main cargo build
docker compose exec -d --workdir /app main ./target/debug/pgcat ./.circleci/pgcat.toml
# Install the Ruby and Python test-suite dependencies.
docker compose exec --workdir /app/tests/ruby main bundle install
docker compose exec --workdir /app/tests/python main pip3 install -r requirements.txt
echo "Interactive test environment ready"
echo "To run integration tests, you can use the following commands:"
echo -e " ${BLUE}Ruby: ${RED}cd /app/tests/ruby && bundle exec ruby tests.rb --format documentation${RESET}"
echo -e " ${BLUE}Python: ${RED}cd /app && python3 tests/python/tests.py${RESET}"
echo -e " ${BLUE}Rust: ${RED}cd /app/tests/rust && cargo run ${RESET}"
echo -e " ${BLUE}Go: ${RED}cd /app/tests/go && /usr/local/go/bin/go test${RESET}"
echo "the source code for tests are directly linked to the source code in the container so you can modify the code and run the tests again"
echo "You can rebuild PgCat from within the container by running"
echo -e " ${GREEN}cargo build${RESET}"
echo "and then run the tests again"
echo "==================================="
# Finally, open an interactive shell in the tests directory.
docker compose exec --workdir /app/tests main bash

View File

@@ -1,4 +1,3 @@
version: "3"
services:
pg1:
image: postgres:14
@@ -48,6 +47,8 @@ services:
main:
build: .
command: ["bash", "/app/tests/docker/run.sh"]
environment:
- INTERACTIVE_TEST_ENVIRONMENT=true
volumes:
- ../../:/app/
- /app/target/

View File

@@ -5,6 +5,38 @@ rm /app/*.profraw || true
rm /app/pgcat.profdata || true
rm -rf /app/cov || true
# Prepares the interactive test environment
#
if [ -n "$INTERACTIVE_TEST_ENVIRONMENT" ]; then
ports=(5432 7432 8432 9432 10432)
for port in "${ports[@]}"; do
is_it_up=0
attempts=0
while [ $is_it_up -eq 0 ]; do
PGPASSWORD=postgres psql -h 127.0.0.1 -p $port -U postgres -c '\q' > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo "PostgreSQL on port $port is up."
is_it_up=1
else
attempts=$((attempts+1))
if [ $attempts -gt 10 ]; then
echo "PostgreSQL on port $port is down, giving up."
exit 1
fi
echo "PostgreSQL on port $port is down, waiting for it to start."
sleep 1
fi
done
done
PGPASSWORD=postgres psql -e -h 127.0.0.1 -p 5432 -U postgres -f /app/tests/sharding/query_routing_setup.sql
PGPASSWORD=postgres psql -e -h 127.0.0.1 -p 7432 -U postgres -f /app/tests/sharding/query_routing_setup.sql
PGPASSWORD=postgres psql -e -h 127.0.0.1 -p 8432 -U postgres -f /app/tests/sharding/query_routing_setup.sql
PGPASSWORD=postgres psql -e -h 127.0.0.1 -p 9432 -U postgres -f /app/tests/sharding/query_routing_setup.sql
PGPASSWORD=postgres psql -e -h 127.0.0.1 -p 10432 -U postgres -f /app/tests/sharding/query_routing_setup.sql
sleep 100000000000000000
exit 0
fi
export LLVM_PROFILE_FILE="/app/pgcat-%m-%p.profraw"
export RUSTC_BOOTSTRAP=1
export CARGO_INCREMENTAL=0

View File

@@ -0,0 +1,145 @@
# Shared wire-format helpers for building PostgreSQL frontend messages.
# All multi-byte integers on the wire are big-endian (network byte order).
class PostgresMessage
  # A protocol string field is the text followed by a NUL terminator.
  def encode_string(str)
    str.to_s + "\0"
  end

  # Big-endian unsigned 16-bit integer.
  def encode_int16(value)
    [value].pack('n')
  end

  # Big-endian unsigned 32-bit integer.
  def encode_int32(value)
    [value].pack('N')
  end

  # Every message starts with a one-byte type code and a 32-bit length
  # that counts the length field itself plus the payload.
  def message_prefix(type, length)
    type.to_s + encode_int32(length)
  end
end
# 'Q' message: a simple-protocol text query.
class SimpleQueryMessage < PostgresMessage
  attr_accessor :query

  def initialize(query = "")
    @query = query
  end

  # Layout: 'Q' | Int32 length | query text + NUL.
  # The length field covers itself (4 bytes) plus the payload.
  def to_bytes
    payload = encode_string(@query)
    message_prefix('Q', payload.size + 4) + payload
  end
end
# 'P' message: parse a query into a (possibly named) prepared statement.
class ParseMessage < PostgresMessage
  attr_accessor :statement_name, :query, :parameter_types

  def initialize(statement_name = "", query = "", parameter_types = [])
    @statement_name = statement_name
    @query = query
    @parameter_types = parameter_types
  end

  # Layout: 'P' | Int32 length | name NUL | query NUL |
  #         Int16 n_param_types | Int32 type OID per parameter.
  def to_bytes
    name = encode_string(@statement_name)
    sql = encode_string(@query)
    type_oids = @parameter_types.pack('N*')
    body = name + sql + encode_int16(@parameter_types.size) + type_oids
    message_prefix('P', body.size + 4) + body
  end
end
# 'B' message: bind parameter values to a prepared statement, producing a portal.
class BindMessage < PostgresMessage
  attr_accessor :portal_name, :statement_name, :parameter_format_codes, :parameters, :result_column_format_codes

  def initialize(portal_name = "", statement_name = "", parameter_format_codes = [], parameters = [], result_column_format_codes = [])
    @portal_name = portal_name
    @statement_name = statement_name
    @parameter_format_codes = parameter_format_codes
    @parameters = parameters
    @result_column_format_codes = result_column_format_codes
  end

  # Layout: 'B' | Int32 length | portal NUL | statement NUL |
  #         Int16 n_fmt | Int16 fmt codes |
  #         Int16 n_params | (Int32 value length + value bytes) per param |
  #         Int16 n_result_fmt | Int16 result fmt codes.
  # A nil parameter is encoded as a value length of -1 (SQL NULL).
  def to_bytes
    portal = encode_string(@portal_name)
    statement = encode_string(@statement_name)
    fmt_codes = @parameter_format_codes.pack('n*')
    values = @parameters.map do |param|
      param.nil? ? encode_int32(-1) : encode_int32(param.bytesize) + param
    end.join
    result_fmt_codes = @result_column_format_codes.pack('n*')
    body = portal + statement +
           encode_int16(@parameter_format_codes.size) + fmt_codes +
           encode_int16(@parameters.size) + values +
           encode_int16(@result_column_format_codes.size) + result_fmt_codes
    message_prefix('B', body.size + 4) + body
  end
end
# 'D' message: describe a prepared statement ('S') or portal ('P').
class DescribeMessage < PostgresMessage
  attr_accessor :type, :name

  def initialize(type = 'S', name = "")
    @type = type
    @name = name
  end

  # Layout: 'D' | Int32 length | target byte ('S' or 'P') | name NUL.
  def to_bytes
    target_name = encode_string(@name)
    message_prefix('D', 4 + 1 + target_name.size) + @type + target_name
  end
end
# 'E' message: execute a bound portal, optionally limiting the row count.
class ExecuteMessage < PostgresMessage
  attr_accessor :portal_name, :max_rows

  def initialize(portal_name = "", max_rows = 0)
    @portal_name = portal_name
    @max_rows = max_rows
  end

  # Layout: 'E' | Int32 length | portal NUL | Int32 max rows (0 = unlimited).
  def to_bytes
    portal = encode_string(@portal_name)
    message_prefix('E', 4 + portal.size + 4) + portal + encode_int32(@max_rows)
  end
end
# 'H' message: ask the server to flush any pending output. No payload.
class FlushMessage < PostgresMessage
  def to_bytes
    message_prefix('H', 4)
  end
end
# 'S' message: end an extended-protocol batch and request ReadyForQuery. No payload.
class SyncMessage < PostgresMessage
  def to_bytes
    message_prefix('S', 4)
  end
end
# 'C' message: close a prepared statement ('S') or portal ('P').
class CloseMessage < PostgresMessage
  attr_accessor :type, :name

  def initialize(type = 'S', name = "")
    @type = type
    @name = name
  end

  # Layout: 'C' | Int32 length | target byte ('S' or 'P') | name NUL.
  def to_bytes
    target_name = encode_string(@name)
    message_prefix('C', 4 + 1 + target_name.size) + @type + target_name
  end
end

View File

@@ -1,5 +1,6 @@
require 'socket'
require 'digest/md5'
require_relative 'frontend_messages'
BACKEND_MESSAGE_CODES = {
'Z' => "ReadyForQuery",
@@ -18,9 +19,13 @@ class PostgresSocket
@host = host
@socket = TCPSocket.new @host, @port
@parameters = {}
@verbose = true
@verbose = false
end
def send_message(message)
@socket.write(message.to_bytes)
end
def send_md5_password_message(username, password, salt)
m = Digest::MD5.hexdigest(password + username)
m = Digest::MD5.hexdigest(m + salt.map(&:chr).join(""))
@@ -113,107 +118,6 @@ class PostgresSocket
log "[F] Sent CancelRequest message"
end
def send_query_message(query)
query_size = query.length
message_size = 1 + 4 + query_size
message = []
message << "Q".ord
message << [message_size].pack('l>').unpack('CCCC') # 4
message << query.split('').map(&:ord) # 2, 11
message << 0 # 1, 12
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent Q message (#{query})"
end
def send_parse_message(query)
query_size = query.length
message_size = 2 + 2 + 4 + query_size
message = []
message << "P".ord
message << [message_size].pack('l>').unpack('CCCC') # 4
message << 0 # unnamed statement
message << query.split('').map(&:ord) # 2, 11
message << 0 # 1, 12
message << [0, 0]
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent P message (#{query})"
end
def send_bind_message
message = []
message << "B".ord
message << [12].pack('l>').unpack('CCCC') # 4
message << 0 # unnamed statement
message << 0 # unnamed statement
message << [0, 0] # 2
message << [0, 0] # 2
message << [0, 0] # 2
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent B message"
end
def send_describe_message(mode)
message = []
message << "D".ord
message << [6].pack('l>').unpack('CCCC') # 4
message << mode.ord
message << 0 # unnamed statement
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent D message"
end
def send_execute_message(limit=0)
message = []
message << "E".ord
message << [9].pack('l>').unpack('CCCC') # 4
message << 0 # unnamed statement
message << [limit].pack('l>').unpack('CCCC') # 4
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent E message"
end
def send_sync_message
message = []
message << "S".ord
message << [4].pack('l>').unpack('CCCC') # 4
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent S message"
end
def send_copydone_message
message = []
message << "c".ord
message << [4].pack('l>').unpack('CCCC') # 4
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent c message"
end
def send_copyfail_message
message = []
message << "f".ord
message << [5].pack('l>').unpack('CCCC') # 4
message << 0
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent f message"
end
def send_flush_message
message = []
message << "H".ord
message << [4].pack('l>').unpack('CCCC') # 4
message.flatten!
@socket.write(message.flatten.pack('C*'))
log "[F] Sent H message"
end
def read_from_server()
output_messages = []
retry_count = 0

View File

@@ -16,10 +16,14 @@ describe "Portocol handling" do
end
def run_comparison(sequence, socket_a, socket_b)
sequence.each do |msg, *args|
socket_a.send(msg, *args)
socket_b.send(msg, *args)
sequence.each do |msg|
if msg.is_a?(Symbol)
socket_a.send(msg)
socket_b.send(msg)
else
socket_a.send_message(msg)
socket_b.send_message(msg)
end
compare_messages(
socket_a.read_from_server,
socket_b.read_from_server
@@ -83,9 +87,9 @@ describe "Portocol handling" do
context "Cancel Query" do
let(:sequence) {
[
[:send_query_message, "SELECT pg_sleep(5)"],
[:cancel_query]
[
SimpleQueryMessage.new("SELECT pg_sleep(5)"),
:cancel_query
]
}
@@ -95,12 +99,12 @@ describe "Portocol handling" do
xcontext "Simple query after parse" do
let(:sequence) {
[
[:send_parse_message, "SELECT 5"],
[:send_query_message, "SELECT 1"],
[:send_bind_message],
[:send_describe_message, "P"],
[:send_execute_message],
[:send_sync_message],
ParseMessage.new("", "SELECT 5", []),
SimpleQueryMessage.new("SELECT 1"),
BindMessage.new("", "", [], [], [0]),
DescribeMessage.new("P", ""),
ExecuteMessage.new("", 1),
SyncMessage.new
]
}
@@ -111,8 +115,8 @@ describe "Portocol handling" do
xcontext "Flush message" do
let(:sequence) {
[
[:send_parse_message, "SELECT 1"],
[:send_flush_message]
ParseMessage.new("", "SELECT 1", []),
FlushMessage.new
]
}
@@ -122,9 +126,7 @@ describe "Portocol handling" do
xcontext "Bind without parse" do
let(:sequence) {
[
[:send_bind_message]
]
[BindMessage.new("", "", [], [], [0])]
}
# This is known to fail.
# Server responds immediately, Proxy buffers the message
@@ -133,23 +135,155 @@ describe "Portocol handling" do
context "Simple message" do
let(:sequence) {
[[:send_query_message, "SELECT 1"]]
[SimpleQueryMessage.new("SELECT 1")]
}
it_behaves_like "at parity with database"
end
10.times do |i|
context "Extended protocol" do
let(:sequence) {
[
ParseMessage.new("", "SELECT 1", []),
BindMessage.new("", "", [], [], [0]),
DescribeMessage.new("S", ""),
ExecuteMessage.new("", 1),
SyncMessage.new
]
}
context "Extended protocol" do
let(:sequence) {
[
[:send_parse_message, "SELECT 1"],
[:send_bind_message],
[:send_describe_message, "P"],
[:send_execute_message],
[:send_sync_message],
]
}
it_behaves_like "at parity with database"
it_behaves_like "at parity with database"
end
end
end
describe "Protocol-level prepared statements" do
let(:processes) { Helpers::Pgcat.single_instance_setup("sharded_db", 1, "transaction") }
before do
q_sock = PostgresSocket.new('localhost', processes.pgcat.port)
q_sock.send_startup_message("sharding_user", "sharded_db", "sharding_user")
table_query = "CREATE TABLE IF NOT EXISTS employees (employee_id SERIAL PRIMARY KEY, salary NUMERIC(10, 2) CHECK (salary > 0));"
q_sock.send_message(SimpleQueryMessage.new(table_query))
q_sock.close
current_configs = processes.pgcat.current_config
current_configs["pools"]["sharded_db"]["prepared_statements_cache_size"] = 500
processes.pgcat.update_config(current_configs)
processes.pgcat.reload_config
end
after do
q_sock = PostgresSocket.new('localhost', processes.pgcat.port)
q_sock.send_startup_message("sharding_user", "sharded_db", "sharding_user")
table_query = "DROP TABLE IF EXISTS employees;"
q_sock.send_message(SimpleQueryMessage.new(table_query))
q_sock.close
end
context "When unnamed prepared statements are used" do
it "does not cache them" do
socket = PostgresSocket.new('localhost', processes.pgcat.port)
socket.send_startup_message("sharding_user", "sharded_db", "sharding_user")
socket.send_message(SimpleQueryMessage.new("DISCARD ALL"))
socket.read_from_server
10.times do |i|
socket.send_message(ParseMessage.new("", "SELECT #{i}", []))
socket.send_message(BindMessage.new("", "", [], [], [0]))
socket.send_message(DescribeMessage.new("S", ""))
socket.send_message(ExecuteMessage.new("", 1))
socket.send_message(SyncMessage.new)
socket.read_from_server
end
socket.send_message(SimpleQueryMessage.new("SELECT name, statement, prepare_time, parameter_types FROM pg_prepared_statements"))
result = socket.read_from_server
number_of_saved_statements = result.count { |m| m[:code] == 'D' }
expect(number_of_saved_statements).to eq(0)
end
end
context "When named prepared statements are used" do
it "caches them" do
socket = PostgresSocket.new('localhost', processes.pgcat.port)
socket.send_startup_message("sharding_user", "sharded_db", "sharding_user")
socket.send_message(SimpleQueryMessage.new("DISCARD ALL"))
socket.read_from_server
3.times do
socket.send_message(ParseMessage.new("my_query", "SELECT * FROM employees WHERE employee_id in ($1,$2,$3)", [0,0,0]))
socket.send_message(BindMessage.new("", "my_query", [0,0,0], [0,0,0].map(&:to_s), [0,0,0,0,0,0]))
socket.send_message(SyncMessage.new)
socket.read_from_server
end
3.times do
socket.send_message(ParseMessage.new("my_other_query", "SELECT * FROM employees WHERE salary in ($1,$2,$3)", [0,0,0]))
socket.send_message(BindMessage.new("", "my_other_query", [0,0,0], [0,0,0].map(&:to_s), [0,0,0,0,0,0]))
socket.send_message(SyncMessage.new)
socket.read_from_server
end
socket.send_message(SimpleQueryMessage.new("SELECT name, statement, prepare_time, parameter_types FROM pg_prepared_statements"))
result = socket.read_from_server
number_of_saved_statements = result.count { |m| m[:code] == 'D' }
expect(number_of_saved_statements).to eq(2)
end
end
context "When DISCARD ALL/DEALLOCATE ALL are called" do
it "resets server and client caches" do
socket = PostgresSocket.new('localhost', processes.pgcat.port)
socket.send_startup_message("sharding_user", "sharded_db", "sharding_user")
20.times do |i|
socket.send_message(ParseMessage.new("my_query_#{i}", "SELECT * FROM employees WHERE employee_id in ($1,$2,$3)", [0,0,0]))
end
20.times do |i|
socket.send_message(BindMessage.new("", "my_query_#{i}", [0,0,0], [0,0,0].map(&:to_s), [0,0]))
end
socket.send_message(SyncMessage.new)
socket.read_from_server
socket.send_message(SimpleQueryMessage.new("DISCARD ALL"))
socket.read_from_server
responses = []
4.times do |i|
socket.send_message(ParseMessage.new("my_query_#{i}", "SELECT * FROM employees WHERE employee_id in ($1,$2,$3)", [0,0,0]))
socket.send_message(BindMessage.new("", "my_query_#{i}", [0,0,0], [0,0,0].map(&:to_s), [0,0]))
socket.send_message(SyncMessage.new)
responses += socket.read_from_server
end
errors = responses.select { |message| message[:code] == 'E' }
error_message = errors.map { |message| message[:bytes].map(&:chr).join("") }.join("\n")
raise StandardError, "Encountered the following errors: #{error_message}" if errors.length > 0
end
end
context "Maximum number of bound paramters" do
it "does not crash" do
test_socket = PostgresSocket.new('localhost', processes.pgcat.port)
test_socket.send_startup_message("sharding_user", "sharded_db", "sharding_user")
types = Array.new(65_535) { |i| 0 }
params = Array.new(65_535) { |i| "$#{i+1}" }.join(",")
test_socket.send_message(ParseMessage.new("my_query", "SELECT * FROM employees WHERE employee_id in (#{params})", types))
test_socket.send_message(BindMessage.new("my_query", "my_query", types, types.map(&:to_s), types))
test_socket.send_message(SyncMessage.new)
# If the proxy crashes, this will raise an error
expect { test_socket.read_from_server }.to_not raise_error
test_socket.close
end
end
end
end

682
tests/rust/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -15,13 +15,11 @@ async fn test_prepared_statements() {
for _ in 0..5 {
let pool = pool.clone();
let handle = tokio::task::spawn(async move {
for _ in 0..1000 {
match sqlx::query("SELECT one").fetch_all(&pool).await {
for i in 0..1000 {
match sqlx::query(&format!("SELECT {:?}", i % 5)).fetch_all(&pool).await {
Ok(_) => (),
Err(err) => {
if err.to_string().contains("prepared statement") {
panic!("prepared statement error: {}", err);
}
panic!("prepared statement error: {}", err);
}
}
}